commit 30a0f1beb7fde5f9cac156e9cb57df2dbaa60fca Author: Kilian Schuettler Date: Fri Jan 31 01:44:54 2025 +0100 initial-commit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..93a18db --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +.DS_Store +node_modules +/build +/.svelte-kit +/package +.env +.env.* +!.env.example +vite.config.js.timestamp-* +vite.config.ts.timestamp-* +.idea +.vscode +**/target/** +dist diff --git a/package.json b/package.json new file mode 100644 index 0000000..2dc70d2 --- /dev/null +++ b/package.json @@ -0,0 +1,43 @@ +{ + "name": "pdf-forge", + "version": "0.1.0", + "description": "", + "type": "module", + "scripts": { + "dev": "vite dev", + "build": "vite build", + "preview": "vite preview", + "check": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json", + "check:watch": "svelte-kit sync && svelte-check --tsconfig ./tsconfig.json --watch", + "tauri": "tauri" + }, + "license": "MIT", + "dependencies": { + "@geoffcox/svelte-splitter": "^1.0.1", + "@tauri-apps/api": "^2", + "@tauri-apps/plugin-dialog": "~2", + "@tauri-apps/plugin-fs": "~2", + "@tauri-apps/plugin-opener": "^2", + "flowbite-svelte": "^0.47.4", + "flowbite-svelte-icons": "^2.0.2", + "paths": "^0.1.1", + "svelte-split-pane": "^0.1.2", + "svelte-splitpanes": "^8.0.9" + }, + "devDependencies": { + "@sveltejs/adapter-static": "^3.0.6", + "@sveltejs/kit": "^2.9.0", + "@sveltejs/vite-plugin-svelte": "^5.0.0", + "@tailwindcss/typography": "^0.5.14", + "@tauri-apps/cli": "^2", + "autoprefixer": "^10.4.20", + "postcss": "^8.5.1", + "sass-embedded": "^1.83.4", + "svelte": "^5.0.0", + "svelte-check": "^4.0.0", + "svelte-preprocess": "^6.0.3", + "tailwindcss": "^3.4.17", + "typescript": "~5.6.2", + "vite": "^6.0.3" + } +} diff --git a/postcss.config.js b/postcss.config.js new file mode 100644 index 0000000..ba80730 --- /dev/null +++ b/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {} + } +}; diff --git a/src-pdfrs/Cargo.lock b/src-pdfrs/Cargo.lock new file mode 100644 index 0000000..b677bd2 --- /dev/null +++ b/src-pdfrs/Cargo.lock @@ -0,0 +1,1731 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aligned-vec" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4aa90d7ce82d4be67b64039a3d588d38dbcc6736577de4a847025ce5b0c468d1" + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "anstream" +version = "0.6.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" + +[[package]] +name = "anstyle-parse" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3534e77181a9cc07539ad51f2141fe32f6c3ffd4df76db8ad92346b003ae4e" +dependencies = [ + "anstyle", + "once_cell", + "windows-sys", +] + +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + +[[package]] +name = "arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" + +[[package]] +name = "arg_enum_proc_macro" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ae92a5119aa49cdbcf6b9f893fe4e1d98b04ccbf82ee0584ad948a44a734dea" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + +[[package]] +name = "async-trait" +version 
= "0.1.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "av1-grain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6678909d8c5d46a42abcf571271e15fdbc0a225e3646cf23762cd415046c78bf" +dependencies = [ + "anyhow", + "arrayvec", + "log", + "nom", + "num-rational", + "v_frame", +] + +[[package]] +name = "avif-serialize" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e335041290c43101ca215eed6f43ec437eb5a42125573f600fc3fa42b9bddd62" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "bit_field" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" + +[[package]] +name = "bitstream-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "built" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c360505aed52b7ec96a3636c3f039d99103c37d1d9b4f7a8c743d3ea9ffcd03b" + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytemuck" +version = "1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "byteorder-lite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.2.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-expr" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" +dependencies = [ + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "clap" +version = "4.5.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8eb5e908ef3a6efbe1ed62520fb7287959888c88485abe072543190ecc66783" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96b01801b5fc6a0a232407abc821660c9c6d25a1cafc0d4f85f29fb8d9afc121" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b755194d6389280185988721fffba69495eed5ee9feeee9a599b53db80318c" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "clap_lex" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "color_quant" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d7b894f5411737b7867f4827955924d7c254fc9f4d91a6aad6b097804b1018b" + +[[package]] +name = "colorchoice" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = 
"0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crunchy" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "dary_heap" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728" + +[[package]] +name = "datasize" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e65c07d59e45d77a8bda53458c24a828893a99ac6cdd9c84111e09176ab739a2" +dependencies = [ + "datasize_derive", +] + +[[package]] +name = "datasize_derive" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613e4ee15899913285b7612004bbd490abd605be7b11d35afada5902fb6b91d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "deflate" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86f7e25f518f4b81808a2cf1c50996a61f5c2eb394b2393bd87f2a4780a432f" +dependencies = [ + "adler32", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys", +] + +[[package]] +name = "euclid" +version = "0.22.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad9cdb4b747e485a12abb0e6566612956c7a1bafa3bdb8d682c5b6d403589e48" +dependencies = [ + "num-traits", +] + +[[package]] +name = "exr" +version = "1.73.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83197f59927b46c04a183a619b7c29df34e63e63c7869320862268c0ef687e0" +dependencies = [ + "bit_field", + "half", + "lebe", + "miniz_oxide", + "rayon-core", + "smallvec", + "zune-inflate", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fax" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03e33ad0e71af414ef9d2b0a94d23ff59115bb068e6a6a06c0952f2c22ffd77" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "3c1d7ffc9f2dc8316348c75281a99c8fdc60c1ddf4f82a366d117bf1b74d5a39" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "flate2" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gif" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fb2d69b19215e18bb912fa30f7ce15846e301408695e44e0ef719f1da9e19f2" +dependencies = [ + "color_quant", + "weezl", +] + +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "globalcache" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240a3059d86f2ba6859ac79f95ff94e65606abc775c1bc0ecf9b6590fb35dc04" +dependencies = [ + "async-trait", + "tuple", + "web-time", +] + +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "image" +version = "0.25.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6f44aed642f18953a158afeb30206f4d50da59fbc66ecb53c66488de73563b" +dependencies = [ + "bytemuck", + "byteorder-lite", + "color_quant", + "exr", + "gif", + "image-webp", + "num-traits", + "png", + "qoi", + "ravif", + "rayon", + "rgb", + "tiff", + "zune-core", + "zune-jpeg", +] + +[[package]] +name = "image-webp" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b77d01e822461baa8409e156015a1d91735549f0f2c17691bd2d996bef238f7f" +dependencies = [ + "byteorder-lite", + "quick-error", +] + +[[package]] +name = "imgref" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d0263a3d970d5c054ed9312c0057b4f3bde9c0b33836d3637361d4a9e6e7a408" + +[[package]] +name = "indexmap" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "interpolate_name" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34819042dc3d3971c46c2190835914dfbe0c3c13f61449b2997f4e9722dfa60" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "istring" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875cc6fb9aecbc1a9bd736f2d18b12e0756b4c80c5e35e28262154abcb077a39" +dependencies = [ + "datasize", +] + +[[package]] +name = "itertools" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "jpeg-decoder" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" +dependencies = [ + "rayon", +] + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lebe" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libflate" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" +dependencies = [ + "core2", + "hashbrown 0.14.5", + "rle-decode-fast", +] + +[[package]] +name = "libfuzzer-sys" +version = "0.4.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b9569d2f74e257076d8c6bfa73fb505b46b851e51ddaecc825944aa3bed17fa" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "log" +version = "0.4.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" + +[[package]] +name = "loop9" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fae87c125b03c1d2c0150c90365d7d6bcc53fb73a9acaef207d2d065860f062" +dependencies = [ + "imgref", +] + +[[package]] +name = "maybe-rayon" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea1f30cedd69f0a2954655f7188c6a834246d2bcf1e315e2ac40c4b24dc9519" +dependencies = [ + "cfg-if", + "rayon", +] + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memmap2" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd3f7eed9d3848f8b98834af67102b720745c4ec028fcd0aa0239277e7de374f" +dependencies = [ + "libc", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nom" +version = "7.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "noop_proc_macro" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + +[[package]] +name = "num-derive" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + 
+[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "pdf" +version = "0.9.1" +dependencies = [ + "aes", + "bitflags 2.8.0", + "cbc", + "datasize", + "deflate", + "euclid", + "fax", + "glob", + "globalcache", + "indexmap", + "istring", + "itertools 0.13.0", + "jpeg-decoder", + "libflate", + "log", + "md5", + "memmap2", + "once_cell", + "pdf_derive", + "sha2", + "snafu", + "stringprep", + "tempfile", + "weezl", +] + +[[package]] +name = "pdf-examples" +version = "0.1.0" +dependencies = [ + "clap", + "datasize", + "image", + "pdf", +] + +[[package]] +name = "pdf_derive" +version = "0.2.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "profiling" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afbdc74edc00b6f6a218ca6a5364d6226a259d4b8ea1af4a0ea063f27e179f4d" +dependencies = [ + "profiling-procmacros", +] + +[[package]] +name = "profiling-procmacros" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a65f2e60fbf1063868558d69c6beacf412dc755f9fc020f514b7955fc914fe30" +dependencies = [ + "quote", + "syn 2.0.96", +] + +[[package]] +name = "qoi" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6d64c71eb498fe9eae14ce4ec935c555749aef511cca85b5568910d6e48001" +dependencies = [ + "bytemuck", +] + +[[package]] +name = "quick-error" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3" + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rav1e" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd87ce80a7665b1cce111f8a16c1f3929f6547ce91ade6addf4ec86a8dda5ce9" +dependencies = [ + "arbitrary", + "arg_enum_proc_macro", + "arrayvec", + "av1-grain", + "bitstream-io", + "built", + "cfg-if", + "interpolate_name", + "itertools 0.12.1", + "libc", + "libfuzzer-sys", + "log", + "maybe-rayon", + "new_debug_unreachable", + "noop_proc_macro", + "num-derive", + "num-traits", + "once_cell", + "paste", + "profiling", + "rand", + "rand_chacha", + "simd_helpers", + "system-deps", + "thiserror", + "v_frame", + "wasm-bindgen", +] + +[[package]] +name = "ravif" +version = "0.11.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2413fd96bd0ea5cdeeb37eaf446a22e6ed7b981d792828721e74ded1980a45c6" +dependencies = [ + "avif-serialize", + "imgref", + "loop9", + "quick-error", + "rav1e", + "rayon", + "rgb", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "rgb" +version = "0.8.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57397d16646700483b67d2dd6511d79318f9d057fdbd21a4066aeac8b41d310a" + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "rustix" +version = "0.38.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" +dependencies = [ + "bitflags 2.8.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys", +] + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "simd_helpers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95890f873bec569a0362c235787f3aca6e1e887302ba4840839bcc6459c42da6" +dependencies = [ + "quote", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "snafu" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "system-deps" +version = "6.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" +dependencies = [ + "cfg-expr", + "heck", + "pkg-config", + "toml", + "version-compare", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + 
+[[package]] +name = "tempfile" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom", + "once_cell", + "rustix", + "windows-sys", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "tiff" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" +dependencies = [ + "flate2", + "jpeg-decoder", + "weezl", +] + +[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "toml" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "winnow", +] + +[[package]] +name = "tuple" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb9f6bd73479481158ba8ee3edf17aca93354623d13f02e96a2014fdbc1c37e" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "v_frame" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6f32aaa24bacd11e488aa9ba66369c7cd514885742c9fe08cfe85884db3e92b" +dependencies = [ + "aligned-vec", + "num-traits", + "wasm-bindgen", +] + +[[package]] +name = "version-compare" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "weezl" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" 
+dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "winnow" +version = "0.6.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a" +dependencies = [ + "memchr", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zune-core" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a" + +[[package]] +name = "zune-inflate" +version = "0.2.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73ab332fe2f6680068f3582b16a24f90ad7096d5d39b974d1c0aff0125116f02" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "zune-jpeg" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99a5bab8d7dedf81405c4bb1f2b83ea057643d9cb28778cea9eecddeedd2e028" +dependencies = [ + "zune-core", +] diff --git a/src-pdfrs/Cargo.toml b/src-pdfrs/Cargo.toml new file mode 100644 index 0000000..2e83206 --- /dev/null +++ b/src-pdfrs/Cargo.toml @@ -0,0 +1,6 @@ +[workspace] +members = [ + "pdf", + "pdf_derive", + 
"examples", +] diff --git a/src-pdfrs/LICENSE b/src-pdfrs/LICENSE new file mode 100644 index 0000000..19c67c2 --- /dev/null +++ b/src-pdfrs/LICENSE @@ -0,0 +1,7 @@ +Copyright © 2020 The pdf-rs contributers. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src-pdfrs/README.md b/src-pdfrs/README.md new file mode 100644 index 0000000..84ced90 --- /dev/null +++ b/src-pdfrs/README.md @@ -0,0 +1,25 @@ +# pdf-rs [![test](https://github.com/pdf-rs/pdf/actions/workflows/test.yml/badge.svg)](https://github.com/pdf-rs/pdf/actions/workflows/test.yml) +Read, alter and write PDF files. + +Modifying and writing PDFs is still experimental. + +One easy way you can contribute is to add different PDF files to `tests/files` and see if they pass the tests (`cargo test`). + +Feel free to contribute with ideas, issues or code! Please join [us on Zulip](https://type.zulipchat.com/#narrow/stream/209232-pdf) if you have any questions or problems. + +# Workspace +This repository uses a Cargo Workspace and default members. This means by default only the `pdf` library is build. +To build additional parts, pass `--package=read` to build the subcrate you are interested in (here the `read` example). + +# Examples +Examples are located in `pdf/examples/` and can be executed using: + +``` +cargo run --example {content,metadata,names,read,text} -- +``` + +# Renderer and Viewer +A library for rendering PDFs via [Pathfinder](https://github.com/servo/pathfinder) and minimal viewer can be found [here](https://github.com/pdf-rs/pdf_render). + +# Inspect +There is a tool for visualizing a PDF file as an interactive hierarchy of primitives at [inspect-prim](https://github.com/pdf-rs/inspect-prim). Just clone and `cargo run`. 
diff --git a/src-pdfrs/examples/Cargo.toml b/src-pdfrs/examples/Cargo.toml new file mode 100644 index 0000000..f194f14 --- /dev/null +++ b/src-pdfrs/examples/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "pdf-examples" +version = "0.1.0" +edition = "2021" +publish = false + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +pdf = { path = "../pdf" } +datasize = "0.2.13" +clap = { version = "*", features = ["derive"] } +image = "*" + +[[bin]] +name = "extract_page" diff --git a/src-pdfrs/examples/src/bin/add_image.rs b/src-pdfrs/examples/src/bin/add_image.rs new file mode 100644 index 0000000..40c980d --- /dev/null +++ b/src-pdfrs/examples/src/bin/add_image.rs @@ -0,0 +1,125 @@ +use std::{path::PathBuf, error::Error}; + +use pdf::{ + file::FileOptions, + object::*, + primitive::Name, enc::{StreamFilter, DCTDecodeParams}, content::{Op, Matrix, Content}, +}; + +use clap::Parser; +use std::io::Cursor; +use image::io::Reader as ImageReader; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] struct Args { + /// Input PDF file + #[arg(short, long)] + input: PathBuf, + + /// Input image file + #[arg(long)] + image: PathBuf, + + /// Page number to add the image to + #[arg(short, long, default_value_t = 0)] + page: u32, + + /// Output file + #[arg(short, long)] + output: PathBuf, +} + +struct Point { + x: f32, + y: f32 +} +struct Align { + page_rel: f32, + page_abs: f32, + img_rel: f32, +} + +fn main() -> Result<(), Box<dyn Error>> { + let args = Args::parse(); + + let img_data = std::fs::read(&args.image)?; + let img = ImageReader::with_format(Cursor::new(&img_data), image::ImageFormat::Jpeg).decode()?; + let image_dict = ImageDict { + width: img.width(), + height: img.height(), + color_space: Some(ColorSpace::DeviceRGB), + bits_per_component: Some(8), + .. Default::default() + }; + let image = Stream::new_with_filters(image_dict, img_data, vec![StreamFilter::DCTDecode(DCTDecodeParams { color_transform: None})]); + + let mut file = FileOptions::cached().open(&args.input).unwrap(); + let page = file.get_page(args.page).expect("no such page"); + + let resources = page.resources()?; + let mut resources2: Resources = (**resources).clone(); + + let image_obj = XObject::Image(ImageXObject { inner: image }); + let image_ref = file.create(image_obj)?; + + // assume that name did not exist + let image_name = Name::from("MyImage"); + resources2.xobjects.insert(image_name.clone(), image_ref.get_ref()); + + + let mut ops = page.contents.as_ref().unwrap().operations(&file.resolver())?; + + let mm = 72.0 / 25.4; // one millimeter + // bottom right corner of the page, but 5mm margin + let h_align = Align { + img_rel: -1.0, // move left by image width + page_rel: 1.0, // move right by page width + page_abs: -5.0 * mm, // 5 mm from the right edge + }; + let v_align = Align { + img_rel: 0.0, + page_rel: 0.0, + page_abs: 5.0 * mm + }; + let dpi = 300.; + + let px_scale = 72.
/ dpi; + let media_box = page.media_box.unwrap(); + let scale = Point { x: img.width() as f32 * px_scale , y: img.height() as f32 * px_scale }; + let skew = Point { x: 0.0, y: 0.0 }; + let page_size = Point { + x: media_box.right - media_box.left, + y: media_box.top - media_box.bottom + }; + let page_origin = Point { + x: media_box.left, + y: media_box.bottom + }; + + let position = Point { + x: page_origin.x + h_align.page_abs + h_align.img_rel * scale.x + h_align.page_rel * page_size.x, + y: page_origin.y + v_align.page_abs + v_align.img_rel * scale.y + v_align.page_rel * page_size.y + }; + + ops.append(&mut vec![ + Op::Save, // ADD IMAGE START + Op::Transform { matrix: Matrix{ // IMAGE MANIPULATION + a: scale.x, d: scale.y, + b: skew.x, c: skew.y, + e: position.x, f: position.y, + } }, + Op::XObject {name: image_name}, // IMAGE + Op::Restore, // ADD IMAGE STOP + ]); + + let mut page2: Page = (*page).clone(); + page2.contents = Some(Content::from_ops(ops)); + page2.resources = Some(file.create(resources2)?.into()); + + file.update(page.get_ref().get_inner(), page2)?; + + file.save_to(&args.output)?; + + Ok(()) +} diff --git a/src-pdfrs/examples/src/bin/extract_page.rs b/src-pdfrs/examples/src/bin/extract_page.rs new file mode 100644 index 0000000..b9637f9 --- /dev/null +++ b/src-pdfrs/examples/src/bin/extract_page.rs @@ -0,0 +1,84 @@ +use std::path::PathBuf; + +use pdf::{ + error::PdfError, + file::FileOptions, + object::*, + build::*, + primitive::{PdfString, Name}, content::{Op, Color, Cmyk, Matrix}, font::{Font, TFont, FontData}, +}; + +use clap::Parser; + +#[derive(Parser, Debug)] +#[command(author, version, about, long_about = None)] +struct Args { + /// Input file + #[arg(short, long)] + input: PathBuf, + + /// Page number + #[arg(short, long, default_value_t = 0)] + page: u32, + + /// Output file + #[arg(short, long)] + output: PathBuf, +} + +fn main() -> Result<(), PdfError> { + let args = Args::parse(); + + let old_file = FileOptions::cached().open(&args.input).unwrap(); + let old_page = old_file.get_page(args.page).expect("no such page"); + + let mut builder = PdfBuilder::new(FileOptions::cached()); + + let mut importer = Importer::new(old_file.resolver(), &mut builder.storage); + let mut pages = Vec::new(); + + let mut new_page = PageBuilder::clone_page(&old_page, &mut importer)?; + importer.finish().verify(&builder.storage.resolver())?; + + let font = Font { + data: FontData::TrueType(TFont{ + base_font: Some(Name::from("Helvetica")), + first_char: None, + font_descriptor: None, + last_char: None, + widths: None, + }), + encoding: Some(pdf::encoding::Encoding::standard()), + name: None, + subtype: pdf::font::FontType::TrueType, + to_unicode: None, + _other: Default::default() + }; + let font_name = Name::from("F42"); + new_page.resources.fonts.insert(font_name.clone(), builder.storage.create(font)?.into()); + + new_page.ops.push(Op::BeginText); + let label = format!("{} page {}", args.input.file_name().unwrap().to_string_lossy(), args.page).into_bytes(); + let mut text_ops = vec![ + Op::FillColor { color: Color::Cmyk(Cmyk { cyan: 0.0, magenta: 0.0, key: 1.0, yellow: 0.0})}, + Op::BeginText, + Op::SetTextMatrix { matrix: Matrix { a: 1.0, b: 0.0, c: 0.0, d: 1., e: 10., f: 10. }}, + Op::TextFont { name: font_name.clone(), size: 20. 
}, + Op::TextDraw { text: PdfString::new(label.into()) }, + Op::EndText + ]; + new_page.ops.append(&mut text_ops); + + pages.push(new_page); + + let catalog = CatalogBuilder::from_pages(pages); + + let mut info = InfoDict::default(); + info.title = Some(PdfString::from("test")); + + let data = builder.info(info).build(catalog)?; + + std::fs::write(&args.output, data)?; + + Ok(()) +} diff --git a/src-pdfrs/examples/src/bin/form.rs b/src-pdfrs/examples/src/bin/form.rs new file mode 100644 index 0000000..23e2d26 --- /dev/null +++ b/src-pdfrs/examples/src/bin/form.rs @@ -0,0 +1,125 @@ +extern crate pdf; + +use std::collections::HashMap; +use std::env::args; + +use pdf::content::{FormXObject, Op, serialize_ops}; +use pdf::error::PdfError; +use pdf::file::{FileOptions, Log}; +use pdf::font::{Font, FontData, TFont}; +use pdf::object::*; +use pdf::primitive::{PdfString, Primitive, Name}; + +fn run() -> Result<(), PdfError> { + let path = args().nth(1).expect("no file given"); + println!("read: {}", path); + + let mut file = FileOptions::cached().open(&path)?; + let mut to_update_field: Option<_> = None; + + + let font = Font { + data: FontData::TrueType(TFont{ + base_font: Some(Name::from("Helvetica")), + first_char: None, + font_descriptor: None, + last_char: None, + widths: None, + }), + encoding: Some(pdf::encoding::Encoding::standard()), + name: None, + subtype: pdf::font::FontType::TrueType, + to_unicode: None, + _other: Default::default() + }; + let font_name = Name::from("Helvetica"); + let font = file.create(font)?; + let mut fonts = HashMap::new(); + fonts.insert("Helvetica".into(), font.into()); + let resources = Resources { + fonts, + .. Default::default() + }; + let resources = file.create(resources)?; + + let page0 = file.get_page(0).unwrap(); + let annots = page0.annotations.load(&file.resolver()).expect("can't load annotations"); + for annot in &*annots { + if let Some(ref a) = annot.appearance_streams { + let normal = file.resolver().get(a.normal); + if let Ok(normal) = normal { + match *normal { + AppearanceStreamEntry::Single(ref s) => { + //dbg!(&s.stream.resources); + + let form_dict = FormDict { + resources: Some(resources.clone().into()), + .. 
(**s.stream).clone() + }; + + let ops = vec![ + Op::Save, + Op::TextFont { name: font_name.clone(), size: 14.0 }, + Op::TextDraw { text: PdfString::from("Hello World!") }, + Op::EndText, + Op::Restore + ]; + let stream = Stream::new(form_dict, serialize_ops(&ops)?); + + let normal2 = AppearanceStreamEntry::Single(FormXObject { stream }); + + file.update(a.normal.get_inner(), normal2)?; + } + _ => {} + } + } + } + } + + if let Some(ref forms) = file.get_root().forms { + println!("Forms:"); + for field in forms.fields.iter().take(1) { + print!(" {:?} = ", field.name); + match field.value { + Primitive::String(ref s) => println!("{}", s.to_string_lossy()), + Primitive::Integer(i) => println!("{}", i), + Primitive::Name(ref s) => println!("{}", s), + ref p => println!("{:?}", p), + } + + if to_update_field.is_none() { + to_update_field = Some(field.clone()); + } + } + } + + if let Some(to_update_field) = to_update_field { + println!("\nUpdating field:"); + println!("{:?}\n", to_update_field); + + let text = "Hello World!"; + let new_value: PdfString = PdfString::new(text.into()); + let mut updated_field = (*to_update_field).clone(); + updated_field.value = Primitive::String(new_value); + + //dbg!(&updated_field); + + let reference = file.update( + to_update_field.get_ref().get_inner(), + updated_field, + )?; + + file.save_to("output/out.pdf")?; + + println!("\nUpdated field:"); + //println!("{:?}\n", reference); + } + + Ok(()) +} + +fn main() { + if let Err(e) = run() { + println!("{e}"); + } +} diff --git a/src-pdfrs/files/encrypted_aes_128.pdf b/src-pdfrs/files/encrypted_aes_128.pdf new file mode 100644 index 0000000..d8ffb75 --- /dev/null +++ b/src-pdfrs/files/encrypted_aes_128.pdf @@ -0,0 +1,46 @@ +%PDF-1.5 +1 0 obj +<> +endobj +2 0 obj +<> +endobj +3 0 obj +<>stream +DMeXpJDO9I [r( l/p +``r룆+j֍W$TLcq`օ& ͵ +8O$J5[Y )BwW[?⩰eO\,5|NWexp'}3p$l(KM^N^D a> +endobj +6 0 obj +<>>>/MediaBox [ 0 0 180 240 ]/Contents 3 0 R>> +endobj +7 0 obj +<>>>/StrF /StdCF/StmF /StdCF/O (6Eӝu;|,\(fZ5?4Sh\\W)/U (3 ]i9)>> +endobj +xref +0 8 +0000000000 65535 f +0000000010 00000 n +0000000059 00000 n +0000000117 00000 n +0000000330 00000 n +0000000352 00000 n +0000000449 00000 n +0000000570 00000 n +trailer +<< + /Size 8 + /Root 1 0 R + /ID [ ] + /Encrypt 7 0 R +>> +startxref +814 +%%EOF diff --git a/src-pdfrs/files/encrypted_aes_256.pdf b/src-pdfrs/files/encrypted_aes_256.pdf new file mode 100644 index 0000000..35a3375 Binary files /dev/null and b/src-pdfrs/files/encrypted_aes_256.pdf differ diff --git a/src-pdfrs/files/encrypted_aes_256_hardened.pdf b/src-pdfrs/files/encrypted_aes_256_hardened.pdf new file mode 100644 index 0000000..e1e61c4 Binary files /dev/null and b/src-pdfrs/files/encrypted_aes_256_hardened.pdf differ diff --git a/src-pdfrs/files/encrypted_rc4_rev2.pdf b/src-pdfrs/files/encrypted_rc4_rev2.pdf new file mode 100644 index 0000000..2f5fd3a Binary files /dev/null and b/src-pdfrs/files/encrypted_rc4_rev2.pdf differ diff --git a/src-pdfrs/files/encrypted_rc4_rev3.pdf b/src-pdfrs/files/encrypted_rc4_rev3.pdf new file mode 100644 index 0000000..d6e9b11 --- /dev/null +++ b/src-pdfrs/files/encrypted_rc4_rev3.pdf @@ -0,0 +1,44 @@ +%PDF-1.4 +1 0 obj +<> +endobj +2 0 obj +<> +endobj +3 0 obj +<>stream + *B,)0?H1.%R,m={xXD^ǒʑV^̃v))ިS,xZnTj  p[е~`nTGVSnKWK +endstream +endobj +4 0 obj +132 +endobj +5 0 obj +<> +endobj +6 0 obj +<>>>/MediaBox [ 0 0 180 240 ]/Contents 3 0 R>> +endobj +7 0 obj +<> +endobj +xref +0 8 +0000000000 65535 f +0000000010 00000 n +0000000059 00000 n +0000000117 00000 n +0000000301 00000 n 
+0000000323 00000 n +0000000420 00000 n +0000000541 00000 n +trailer +<< + /Size 8 + /Root 1 0 R + /ID [ ] + /Encrypt 7 0 R +>> +startxref +678 +%%EOF diff --git a/src-pdfrs/files/ep.pdf b/src-pdfrs/files/ep.pdf new file mode 100644 index 0000000..a106176 Binary files /dev/null and b/src-pdfrs/files/ep.pdf differ diff --git a/src-pdfrs/files/ep2.pdf b/src-pdfrs/files/ep2.pdf new file mode 100644 index 0000000..df5ad82 Binary files /dev/null and b/src-pdfrs/files/ep2.pdf differ diff --git a/src-pdfrs/files/example.pdf b/src-pdfrs/files/example.pdf new file mode 100644 index 0000000..d736ded --- /dev/null +++ b/src-pdfrs/files/example.pdf @@ -0,0 +1,57 @@ +%PDF-1.7 +% +3 0 obj +<< /Length 4 0 R >> +stream +/DeviceRGB cs /DeviceRGB CS +0 0 0.972549 SC +21.68 194 136.64 26 re +10 10 m 20 20 l S +BT +/F0 24 Tf +25.68 200 Td +(Hello World!) Tj +ET +endstream +endobj +4 0 obj +132 +endobj +5 0 obj +<< /Type /Font /Subtype /Type1 /BaseFont /Times-Roman /Encoding /WinAnsiEncoding >> +endobj +6 0 obj +<< /Type /Page + /Parent 2 0 R + /Resources << /Font << /F0 5 0 R >> >> + /MediaBox [ 0 0 180 240 ] + /Contents 3 0 R +>> +endobj +2 0 obj +<< /Type /Pages + /Count 1 + /Kids [ 6 0 R ] +>> +endobj +1 0 obj +<< /Type /Catalog + /Pages 2 0 R +>> +endobj +xref +0 7 +0000000000 65535 f +0000000522 00000 n +0000000457 00000 n +0000000015 00000 n +0000000199 00000 n +0000000218 00000 n +0000000317 00000 n +trailer +<< /Size 7 + /Root 1 0 R +>> +startxref +574 +%%EOF diff --git a/src-pdfrs/files/example_annotation.pdf b/src-pdfrs/files/example_annotation.pdf new file mode 100644 index 0000000..bdd2d0c Binary files /dev/null and b/src-pdfrs/files/example_annotation.pdf differ diff --git a/src-pdfrs/files/formxobject.pdf b/src-pdfrs/files/formxobject.pdf new file mode 100644 index 0000000..aad2b45 --- /dev/null +++ b/src-pdfrs/files/formxobject.pdf @@ -0,0 +1,70 @@ +%PDF-1.7 +% +1 0 obj +<< /Type /Catalog + /Pages 2 0 R +>> +endobj +2 0 obj +<< /Kids [3 0 R] + /Type /Pages + /Count 1 +>> +endobj +3 0 obj +<< /Contents 4 0 R + /Type /Page + /Resources << /XObject << /Im0 5 0 R >> >> + /Parent 2 0 R + /MediaBox [0 0 180 240] +>> +endobj +4 0 obj +<< /Length 93 >> +stream +/DeviceRGB cs /DeviceRGB CS +0 0 0.972549 SC +21.68 194 136.64 26 re +10 10 m 20 20 l S +/Im0 Do +endstream +endobj +5 0 obj +<< /Subtype /Form + /Type /XObject + /FormType 1 + /Resources << /Font << /F0 6 0 R >> >> + /BBox [0 0 180 240] + /Length 47 +>> +stream +BT +/F0 24 Tf +25.68 200 Td +(Hello World!) 
Tj +ET +endstream +endobj +6 0 obj +<< /Subtype /Type1 + /Type /Font + /BaseFont /Times-Roman + /Encoding /WinAnsiEncoding +>> +endobj +xref +0 7 +0000000000 65535 f +0000000015 00000 n +0000000067 00000 n +0000000130 00000 n +0000000272 00000 n +0000000414 00000 n +0000000626 00000 n +trailer +<< /Root 1 0 R + /Size 7 +>> +startxref +734 +%%EOF diff --git a/src-pdfrs/files/invalid/crash-121-1.pdf b/src-pdfrs/files/invalid/crash-121-1.pdf new file mode 100644 index 0000000..33c7f3b Binary files /dev/null and b/src-pdfrs/files/invalid/crash-121-1.pdf differ diff --git a/src-pdfrs/files/invalid/crash-121-2.pdf b/src-pdfrs/files/invalid/crash-121-2.pdf new file mode 100644 index 0000000..5d5186e Binary files /dev/null and b/src-pdfrs/files/invalid/crash-121-2.pdf differ diff --git a/src-pdfrs/files/invalid/crash-121-3.pdf b/src-pdfrs/files/invalid/crash-121-3.pdf new file mode 100644 index 0000000..5667269 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-121-3.pdf differ diff --git a/src-pdfrs/files/invalid/crash-121-4.pdf b/src-pdfrs/files/invalid/crash-121-4.pdf new file mode 100644 index 0000000..2e0b878 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-121-4.pdf differ diff --git a/src-pdfrs/files/invalid/crash-122.pdf b/src-pdfrs/files/invalid/crash-122.pdf new file mode 100644 index 0000000..bc3b9e7 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-122.pdf differ diff --git a/src-pdfrs/files/invalid/crash-123.pdf b/src-pdfrs/files/invalid/crash-123.pdf new file mode 100644 index 0000000..3852ce9 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-123.pdf differ diff --git a/src-pdfrs/files/invalid/crash-124.pdf b/src-pdfrs/files/invalid/crash-124.pdf new file mode 100644 index 0000000..66a7588 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-124.pdf differ diff --git a/src-pdfrs/files/invalid/crash-assertion-failure.pdf b/src-pdfrs/files/invalid/crash-assertion-failure.pdf new file mode 100644 index 0000000..a9dd373 Binary files /dev/null and b/src-pdfrs/files/invalid/crash-assertion-failure.pdf differ diff --git a/src-pdfrs/files/invalid/infinite-loop-103.pdf b/src-pdfrs/files/invalid/infinite-loop-103.pdf new file mode 100644 index 0000000..3af3635 --- /dev/null +++ b/src-pdfrs/files/invalid/infinite-loop-103.pdf @@ -0,0 +1 @@ +startxref%PDF- \ No newline at end of file diff --git a/src-pdfrs/files/jpeg.pdf b/src-pdfrs/files/jpeg.pdf new file mode 100644 index 0000000..3b4df43 Binary files /dev/null and b/src-pdfrs/files/jpeg.pdf differ diff --git a/src-pdfrs/files/libreoffice.pdf b/src-pdfrs/files/libreoffice.pdf new file mode 100644 index 0000000..f3e6f0d Binary files /dev/null and b/src-pdfrs/files/libreoffice.pdf differ diff --git a/src-pdfrs/files/lossless.pdf b/src-pdfrs/files/lossless.pdf new file mode 100644 index 0000000..0de9555 Binary files /dev/null and b/src-pdfrs/files/lossless.pdf differ diff --git a/src-pdfrs/files/offset.pdf b/src-pdfrs/files/offset.pdf new file mode 100644 index 0000000..e3b649d --- /dev/null +++ b/src-pdfrs/files/offset.pdf @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + +%PDF-1.7 +% +3 0 obj +<< /Length 4 0 R >> +stream +/DeviceRGB cs /DeviceRGB CS +0 0 0.972549 SC +21.68 194 136.64 26 re +10 10 m 20 20 l S +BT +/F0 24 Tf +25.68 200 Td +(Hello World!) 
Tj +ET +endstream +endobj +4 0 obj +132 +endobj +5 0 obj +<< /Type /Font /Subtype /Type1 /BaseFont /Times-Roman /Encoding /WinAnsiEncoding >> +endobj +6 0 obj +<< /Type /Page + /Parent 2 0 R + /Resources << /Font << /F0 5 0 R >> >> + /MediaBox [ 0 0 180 240 ] + /Contents 3 0 R +>> +endobj +2 0 obj +<< /Type /Pages + /Count 1 + /Kids [ 6 0 R ] +>> +endobj +1 0 obj +<< /Type /Catalog + /Pages 2 0 R +>> +endobj +xref +0 7 +0000000000 65535 f +0000000522 00000 n +0000000457 00000 n +0000000015 00000 n +0000000199 00000 n +0000000218 00000 n +0000000317 00000 n +trailer +<< /Size 7 + /Root 1 0 R +>> +startxref +574 +%%EOF diff --git a/src-pdfrs/files/password_protected/passwords_aes_128.pdf b/src-pdfrs/files/password_protected/passwords_aes_128.pdf new file mode 100644 index 0000000..a9ca960 Binary files /dev/null and b/src-pdfrs/files/password_protected/passwords_aes_128.pdf differ diff --git a/src-pdfrs/files/password_protected/passwords_aes_256.pdf b/src-pdfrs/files/password_protected/passwords_aes_256.pdf new file mode 100644 index 0000000..63d160b Binary files /dev/null and b/src-pdfrs/files/password_protected/passwords_aes_256.pdf differ diff --git a/src-pdfrs/files/password_protected/passwords_aes_256_hardened.pdf b/src-pdfrs/files/password_protected/passwords_aes_256_hardened.pdf new file mode 100644 index 0000000..71a8a4f Binary files /dev/null and b/src-pdfrs/files/password_protected/passwords_aes_256_hardened.pdf differ diff --git a/src-pdfrs/files/password_protected/passwords_rc4_rev2.pdf b/src-pdfrs/files/password_protected/passwords_rc4_rev2.pdf new file mode 100644 index 0000000..690f37b Binary files /dev/null and b/src-pdfrs/files/password_protected/passwords_rc4_rev2.pdf differ diff --git a/src-pdfrs/files/password_protected/passwords_rc4_rev3.pdf b/src-pdfrs/files/password_protected/passwords_rc4_rev3.pdf new file mode 100644 index 0000000..6fc2e54 --- /dev/null +++ b/src-pdfrs/files/password_protected/passwords_rc4_rev3.pdf @@ -0,0 +1,44 @@ +%PDF-1.4 +1 0 obj +<> +endobj +2 0 obj +<> +endobj +3 0 obj +<>stream ++Z(ՆI0hoo'^^*)ut8&[O7sjtS-hFY҅Бrc0yͳk9N0 lhFg:SЭ^2EfD9ş +endstream +endobj +4 0 obj +132 +endobj +5 0 obj +<> +endobj +6 0 obj +<>>>/MediaBox [ 0 0 180 240 ]/Contents 3 0 R>> +endobj +7 0 obj +<> +endobj +xref +0 8 +0000000000 65535 f +0000000010 00000 n +0000000059 00000 n +0000000117 00000 n +0000000301 00000 n +0000000323 00000 n +0000000420 00000 n +0000000541 00000 n +trailer +<< + /Size 8 + /Root 1 0 R + /ID [ ] + /Encrypt 7 0 R +>> +startxref +680 +%%EOF diff --git a/src-pdfrs/files/pdf-sample.pdf b/src-pdfrs/files/pdf-sample.pdf new file mode 100644 index 0000000..f698ff5 Binary files /dev/null and b/src-pdfrs/files/pdf-sample.pdf differ diff --git a/src-pdfrs/files/xelatex-drawboard.pdf b/src-pdfrs/files/xelatex-drawboard.pdf new file mode 100644 index 0000000..829f9fc Binary files /dev/null and b/src-pdfrs/files/xelatex-drawboard.pdf differ diff --git a/src-pdfrs/files/xelatex.pdf b/src-pdfrs/files/xelatex.pdf new file mode 100644 index 0000000..2f02419 Binary files /dev/null and b/src-pdfrs/files/xelatex.pdf differ diff --git a/src-pdfrs/pdf/Cargo.toml b/src-pdfrs/pdf/Cargo.toml new file mode 100644 index 0000000..60b18b3 --- /dev/null +++ b/src-pdfrs/pdf/Cargo.toml @@ -0,0 +1,65 @@ +[package] +name = "pdf" +version = "0.9.1" +authors = ["Erlend Langseth <3rlendhl@gmail.com>", "Sebastian Köln "] +repository = "https://github.com/pdf-rs/pdf" +readme = "../README.md" +keywords = ["pdf"] +license = "MIT" +documentation = "https://docs.rs/pdf" +edition = "2018" 
+description = "PDF reader" + +[features] +mmap = ["memmap2"] +dump = ["tempfile"] +threads = ["jpeg-decoder/default"] +sync = [] +cache = ["globalcache"] +default = ["sync", "cache"] + +[dependencies] +pdf_derive = { version = "0.2.0", path = "../pdf_derive" } +snafu = "0.8.3" +libflate = "2.0.0" +deflate = "1.0.0" +itertools = "0.13.0" +memmap2 = { version = "0.9.4", optional = true } +weezl = "0.1.4" +once_cell = "1.5.2" +log = "0.4.14" +tempfile = { version = "3.2.0", optional = true } +md5 = "0.7" +jpeg-decoder = { version = "0.3.0", default-features = false } +aes = "0.8.2" +cbc = "0.1" +stringprep = "0.1.2" +sha2 = "0.10.2" +fax = "0.2.0" +euclid = { version = "0.22.7", optional = true } +bitflags = "2.5" +istring = { version = "0.3.3", features = ["std", "size"] } +datasize = "0.2.13" +globalcache = { version = "0.2.3", features = ["sync"], optional = true } +indexmap = "2.1.0" + +[dev-dependencies] +glob = "0.3.0" + +[lib] +doctest = false + +[[example]] +name = "content" + +[[example]] +name = "metadata" + +[[example]] +name = "names" + +[[example]] +name = "read" + +[[example]] +name = "other_page_content" diff --git a/src-pdfrs/pdf/examples/content.rs b/src-pdfrs/pdf/examples/content.rs new file mode 100644 index 0000000..9652407 --- /dev/null +++ b/src-pdfrs/pdf/examples/content.rs @@ -0,0 +1,67 @@ +use std::env; +use std::path::PathBuf; + + +use pdf::error::PdfError; +use pdf::content::*; +use pdf::file::FileOptions; + + + + +use pdf::object::*; +use pdf::build::*; + +use pdf::primitive::PdfString; + +#[cfg(feature="cache")] +fn main() -> Result<(), PdfError> { + let path = PathBuf::from(env::args_os().nth(1).expect("no file given")); + + let mut builder = PdfBuilder::new(FileOptions::cached()); + + let mut pages = Vec::new(); + + let content = Content::from_ops(vec![ + Op::MoveTo { p: Point { x: 100., y: 100. } }, + Op::LineTo { p: Point { x: 100., y: 200. } }, + Op::LineTo { p: Point { x: 200., y: 200. } }, + Op::LineTo { p: Point { x: 200., y: 100. 
} }, + Op::Close, + Op::Stroke, + ]); + let mut new_page = PageBuilder::from_content(content, &NoResolve)?; + new_page.media_box = Some(pdf::object::Rectangle { + left: 0.0, + top: 0.0, + bottom: 400.0, + right: 400.0 + }); + let resources = Resources::default(); + + /* + let font = Font { + name: Some("Test".into()), + subtype: pdf::font::FontType::TrueType, + data: FontData::TrueType(TFont { + base_font: None, + + }) + } + resources.fonts.insert("f1", font); + */ + + new_page.resources = resources; + pages.push(new_page); + + let catalog = CatalogBuilder::from_pages(pages); + + let mut info = InfoDict::default(); + info.title = Some(PdfString::from("test")); + + let data = builder.info(info).build(catalog)?; + + std::fs::write(path, data)?; + + Ok(()) +} diff --git a/src-pdfrs/pdf/examples/metadata.rs b/src-pdfrs/pdf/examples/metadata.rs new file mode 100644 index 0000000..4fba7e9 --- /dev/null +++ b/src-pdfrs/pdf/examples/metadata.rs @@ -0,0 +1,38 @@ +use std::env::args; + +use pdf::error::PdfError; +use pdf::file::{FileOptions}; +use pdf::object::{FieldDictionary, FieldType, Resolve}; + +/// extract and print a PDF's metadata +#[cfg(feature="cache")] +fn main() -> Result<(), PdfError> { + let path = args() + .nth(1) + .expect("Please provide a file path to the PDF you want to explore."); + + let file = FileOptions::cached().open(&path).unwrap(); + let resolver = file.resolver(); + + if let Some(ref info) = file.trailer.info_dict { + dbg!(info); + } + + if let Some(ref forms) = file.get_root().forms { + for field in forms.fields.iter() { + print_field(field, &resolver); + } + } + + Ok(()) +} + +fn print_field(field: &FieldDictionary, resolve: &impl Resolve) { + if field.typ == Some(FieldType::Signature) { + println!("{:?}", field); + } + for &kid in field.kids.iter() { + let child = resolve.get(kid).unwrap(); + print_field(&child, resolve); + } +} \ No newline at end of file diff --git a/src-pdfrs/pdf/examples/names.rs b/src-pdfrs/pdf/examples/names.rs new file mode 100644 index 0000000..b273c72 --- /dev/null +++ b/src-pdfrs/pdf/examples/names.rs @@ -0,0 +1,129 @@ +extern crate pdf; + +use std::env::args; +use std::fmt; +use std::collections::HashMap; +use pdf::file::{FileOptions}; +use pdf::object::*; +use pdf::primitive::{Primitive, PdfString}; + +struct Indent(usize); +impl fmt::Display for Indent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for _ in 0 .. self.0 { + write!(f, " ")?; + } + Ok(()) + } +} + +fn walk_outline(r: &impl Resolve, mut node: RcRef, name_map: &impl Fn(&str) -> usize, page_map: &impl Fn(PlainRef) -> usize, depth: usize) { + let indent = Indent(depth); + loop { + if let Some(ref title) = node.title { + println!("{}title: {:?}", indent, title.to_string_lossy()); + } + if let Some(ref dest) = node.dest { + match dest { + Primitive::String(ref s) => { + let name = s.to_string_lossy(); + let page_nr = name_map(&name); + println!("{}dest: {:?} -> page nr. {:?}", indent, name, page_nr); + } + Primitive::Array(ref a) => match a[0] { + Primitive::Reference(r) => { + let page_nr = page_map(r); + println!("{}dest: {:?} -> page nr. {:?}", indent, a, page_nr); + } + _ => unimplemented!("invalid reference in array"), + } + _ => unimplemented!("invalid dest"), + } + } + if let Some(Action::Goto(MaybeNamedDest::Direct(Dest { page: Some(page), ..}))) = node.action { + let page_nr = page_map(page.get_inner()); + println!("{}action -> page nr. 
{:?}", indent, page_nr); + } + if let Some(ref a) = node.se { + println!("{} -> {:?}", indent, a); + } + if let Some(entry_ref) = node.first { + let entry = r.get(entry_ref).unwrap(); + walk_outline(r, entry, name_map, page_map, depth + 1); + } + if let Some(entry_ref) = node.next { + node = r.get(entry_ref).unwrap(); + continue; + } + + break; + } +} + +#[cfg(feature="cache")] +fn main() { + let path = args().nth(1).expect("no file given"); + println!("read: {}", path); + + let file = FileOptions::cached().open(&path).unwrap(); + let resolver = file.resolver(); + let catalog = file.get_root(); + + let mut pages_map: HashMap = HashMap::new(); + + let mut count = 0; + let mut dests_cb = |key: &PdfString, val: &Option| { + //println!("{:?} {:?}", key, val); + if let Some(Dest { page: Some(page), ..}) = val { + pages_map.insert(key.to_string_lossy(), page.get_inner()); + } + + count += 1; + }; + + if let Some(ref names) = catalog.names { + if let Some(ref dests) = names.dests { + dests.walk(&resolver, &mut dests_cb).unwrap(); + } + } + + let mut pages = HashMap::new(); + fn add_tree(r: &impl Resolve, pages: &mut HashMap, tree: &PageTree, current_page: &mut usize) { + for &node_ref in &tree.kids { + let node = r.get(node_ref).unwrap(); + match *node { + PagesNode::Tree(ref tree) => { + add_tree(r, pages, tree, current_page); + } + PagesNode::Leaf(ref _page) => { + pages.insert(node_ref.get_inner(), *current_page); + *current_page += 1; + } + } + } + } + add_tree(&resolver, &mut pages, &catalog.pages, &mut 0); + + let get_page_nr = |name: &str| -> usize { + let page = pages_map[name]; + pages[&page] + }; + let page_nr = |r: PlainRef| -> usize { + pages[&r] + }; + + if let Some(ref outlines) = catalog.outlines { + if let Some(entry_ref) = outlines.first { + let entry = resolver.get(entry_ref).unwrap(); + walk_outline(&resolver, entry, &get_page_nr, &page_nr, 0); + } + } + + println!("{} items", count); + + if let Some(ref labels) = catalog.page_labels { + labels.walk(&resolver, &mut |page: i32, label| { + println!("{page} -> {:?}", label); + }); + } +} diff --git a/src-pdfrs/pdf/examples/other_page_content.rs b/src-pdfrs/pdf/examples/other_page_content.rs new file mode 100644 index 0000000..52ac453 --- /dev/null +++ b/src-pdfrs/pdf/examples/other_page_content.rs @@ -0,0 +1,110 @@ +use pdf::content::ViewRect; +use pdf::error::PdfError; +use pdf::file::FileOptions; +use pdf::object::Resolve; +use pdf::primitive::{Dictionary, Primitive}; +use std::env::args; + +/// Extract data from a page entry that is under "other". +/// This example looks for stikethroughs in the annotations entry +/// and returns a Vec for the bounds of the struckthrough text. 
+#[cfg(feature="cache")]
+fn main() -> Result<(), PdfError> {
+    let path = args()
+        .nth(1)
+        .expect("Please provide a file path to the PDF you want to explore.");
+
+    let file = FileOptions::cached().open(&path).unwrap();
+    let resolver = file.resolver();
+
+    for (i, page) in file.pages().enumerate() {
+        let page = page.unwrap();
+        let strikethroughs = annotation_strikethrough(&page.other, &resolver)?;
+        println!(
+            "Found {} strikethrough annotations on page {}.",
+            strikethroughs.len(),
+            i + 1
+        );
+        for strikethrough in strikethroughs {
+            println!();
+            println!("Struck text:");
+            println!("{:#?}", strikethrough.0);
+            println!();
+            println!("Text spans {} lines", strikethrough.1.len());
+            println!();
+            println!("Strikethrough bounding boxes:");
+            for rect in strikethrough.1 {
+                println!("{:#?}", rect);
+                println!();
            }
+            println!();
+            println!();
+        }
+    }
+
+    Ok(())
+}
+
+fn annotation_strikethrough(
+    other_dict: &Dictionary,
+    resolver: &impl Resolve,
+) -> Result<Vec<(String, Vec<ViewRect>)>, PdfError> {
+    let mut strikethroughs: Vec<(String, Vec<ViewRect>)> = Vec::new();
+
+    if !other_dict.is_empty() {
+        let annotations = other_dict.get("Annots".into());
+        if let Some(annotations) = annotations {
+            let annotations_resolved = annotations.clone().resolve(resolver)?;
+            let annotations_array = annotations_resolved.into_array()?;
+            for annotation in annotations_array.iter() {
+                let mut paths: Vec<ViewRect> = Vec::new();
+                let annotation_resolved = annotation.clone().resolve(resolver)?;
+                let annotation_dict = annotation_resolved.into_dictionary()?;
+
+                // If you have a multiline strikethrough, "Rect" will be the bounding
+                // box around all the strikethrough lines.
+                // "QuadPoints" gives 8 numbers for each line that is struck through,
+                // so if a single annotation involves text on two lines, QuadPoints
+                // should have 16 values in it. It starts with the bottom left and
+                // runs counter-clockwise.
+                let subtype = annotation_dict.get("Subtype".into());
+                if let Some(subtype) = subtype {
+                    let subtype = subtype.clone().into_name()?;
+                    if subtype.as_str() == "StrikeOut" {
+                        let rects = annotation_dict.get("QuadPoints".into());
+                        let text = annotation_dict.get("Contents".into());
+                        if let (Some(rects), Some(text)) = (rects, text) {
+                            let text = text.to_string()?;
+
+                            // Check multiples of 8.
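+                            // Each group of 8 numbers describes one struck-through line: the
+                            // ViewRect built below takes x/y from the first point and derives
+                            // width/height from the differences [2]-[0] and [7]-[1], which
+                            // assumes axis-aligned quads.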
+ let rects_array = rects.clone().into_array()?; + if rects_array.len() % 8 == 0 { + let rects: Vec> = + rects_array.chunks(8).map(|chunk| chunk.to_vec()).collect(); + + for rect in rects { + let mut quad_points: Vec = Vec::new(); + for num in rect { + let number = num.as_number()?; + quad_points.push(number); + } + if quad_points.len() == 8 { + paths.push(ViewRect { + x: quad_points[0], + y: quad_points[1], + width: quad_points[2] - quad_points[0], + height: quad_points[7] - quad_points[1], + }); + } + } + strikethroughs.push((text, paths)) + } + } + } + } + } + } + } + + Ok(strikethroughs) +} diff --git a/src-pdfrs/pdf/examples/read.rs b/src-pdfrs/pdf/examples/read.rs new file mode 100644 index 0000000..bdd8f5f --- /dev/null +++ b/src-pdfrs/pdf/examples/read.rs @@ -0,0 +1,128 @@ +extern crate pdf; + +use std::collections::HashMap; +use std::env::args; +use std::fs; +use std::time::SystemTime; + +use pdf::enc::StreamFilter; +use pdf::error::PdfError; +use pdf::file::{FileOptions, Log}; +use pdf::object::*; +use pdf::primitive::Primitive; + +struct VerboseLog; +impl Log for VerboseLog { + fn load_object(&self, r: PlainRef) { + println!("load {r:?}"); + } + fn log_get(&self, r: PlainRef) { + println!("get {r:?}"); + } +} + +#[cfg(feature = "cache")] +fn main() -> Result<(), PdfError> { + let path = "/home/kschuettler/Dokumente/TestFiles/SYNGENTA_EFSA_sanitisation_GFL_v1.pdf"; //args().nth(1).expect("no file given"); + println!("read: {}", path); + let now = SystemTime::now(); + + let file = FileOptions::cached().log(VerboseLog).open(&path).unwrap(); + let resolver = file.resolver(); + + if let Some(ref info) = file.trailer.info_dict { + let title = info.title.as_ref().map(|p| p.to_string_lossy()); + let author = info.author.as_ref().map(|p| p.to_string_lossy()); + + let descr = match (title, author) { + (Some(title), None) => title, + (None, Some(author)) => format!("[no title] – {}", author), + (Some(title), Some(author)) => format!("{} – {}", title, author), + _ => "PDF".into(), + }; + println!("{}", descr); + } + + let mut images: Vec<_> = vec![]; + let mut fonts = HashMap::new(); + + for page in file.pages() { + let page = page.unwrap(); + let resources = page.resources().unwrap(); + for (i, font) in resources + .fonts + .values() + .map(|lazy_font| lazy_font.load(&resolver)) + .filter_map(|f| f.ok()) + .enumerate() + { + let name = match &font.name { + Some(name) => name.as_str().into(), + None => i.to_string(), + }; + fonts.insert(name, font.clone()); + } + images.extend( + resources + .xobjects + .iter() + .map(|(_name, &r)| resolver.get(r).unwrap()) + .filter(|o| matches!(**o, XObject::Image(_))), + ); + } + + for (i, o) in images.iter().enumerate() { + let img = match **o { + XObject::Image(ref im) => im, + _ => continue, + }; + let (mut data, filter) = img.raw_image_data(&resolver)?; + let ext = match filter { + Some(StreamFilter::DCTDecode(_)) => "jpeg", + Some(StreamFilter::JBIG2Decode(_)) => "jbig2", + Some(StreamFilter::JPXDecode) => "jp2k", + Some(StreamFilter::FlateDecode(_)) => "png", + Some(StreamFilter::CCITTFaxDecode(_)) => { + data = fax::tiff::wrap(&data, img.width, img.height).into(); + "tiff" + } + _ => continue, + }; + + let fname = format!("extracted_image_{}.{}", i, ext); + + fs::write(fname.as_str(), data).unwrap(); + println!("Wrote file {}", fname); + } + println!("Found {} image(s).", images.len()); + + for (name, font) in fonts.iter() { + let fname = format!("font_{}", name); + if let Some(Ok(data)) = font.embedded_data(&resolver) { + 
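+            // Fonts without an embedded font program are skipped by the `if let` above;
+            // `data` is presumably the raw FontFile/FontFile2/FontFile3 stream contents
+            // and is written to disk unchanged.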
fs::write(fname.as_str(), data).unwrap(); + println!("Wrote file {}", fname); + } + } + println!("Found {} font(s).", fonts.len()); + + if let Some(ref forms) = file.get_root().forms { + println!("Forms:"); + for field in forms.fields.iter() { + print!(" {:?} = ", field.name); + match field.value { + Primitive::String(ref s) => println!("{}", s.to_string_lossy()), + Primitive::Integer(i) => println!("{}", i), + Primitive::Name(ref s) => println!("{}", s), + ref p => println!("{:?}", p), + } + } + } + + if let Ok(elapsed) = now.elapsed() { + println!( + "Time: {}s", + elapsed.as_secs() as f64 + elapsed.subsec_nanos() as f64 * 1e-9 + ); + } + Ok(()) +} diff --git a/src-pdfrs/pdf/fuzz/.gitignore b/src-pdfrs/pdf/fuzz/.gitignore new file mode 100644 index 0000000..572e03b --- /dev/null +++ b/src-pdfrs/pdf/fuzz/.gitignore @@ -0,0 +1,4 @@ + +target +corpus +artifacts diff --git a/src-pdfrs/pdf/fuzz/Cargo.lock b/src-pdfrs/pdf/fuzz/Cargo.lock new file mode 100644 index 0000000..a44540d --- /dev/null +++ b/src-pdfrs/pdf/fuzz/Cargo.lock @@ -0,0 +1,735 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "arbitrary" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" + +[[package]] +name = "async-trait" +version = "0.1.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "bumpalo" +version = "3.17.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1628fb46dfa0b37568d12e5edd512553eccf6a22a78e8bde00bb4aed84d5bdbf" + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "dary_heap" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728" + +[[package]] +name = "datasize" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e65c07d59e45d77a8bda53458c24a828893a99ac6cdd9c84111e09176ab739a2" +dependencies = [ + "datasize_derive", +] + +[[package]] +name = "datasize_derive" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613e4ee15899913285b7612004bbd490abd605be7b11d35afada5902fb6b91d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "deflate" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86f7e25f518f4b81808a2cf1c50996a61f5c2eb394b2393bd87f2a4780a432f" +dependencies = [ + "adler32", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "fax" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03e33ad0e71af414ef9d2b0a94d23ff59115bb068e6a6a06c0952f2c22ffd77" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c1d7ffc9f2dc8316348c75281a99c8fdc60c1ddf4f82a366d117bf1b74d5a39" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "globalcache" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240a3059d86f2ba6859ac79f95ff94e65606abc775c1bc0ecf9b6590fb35dc04" +dependencies = [ + "async-trait", + "tuple", + "web-time", +] + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "indexmap" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "istring" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875cc6fb9aecbc1a9bd736f2d18b12e0756b4c80c5e35e28262154abcb077a39" +dependencies = [ + "datasize", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "jobserver" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "jpeg-decoder" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libflate" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" +dependencies = [ + "core2", + "hashbrown 0.14.5", + "rle-decode-fast", +] + +[[package]] +name = "libfuzzer-sys" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf78f52d400cf2d84a3a973a78a592b4adc535739e0a5597a0da6f0c357adc75" +dependencies = [ + "arbitrary", + "cc", +] + +[[package]] +name = "log" +version = "0.4.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "pdf" +version = "0.9.1" +dependencies = [ + "aes", + "bitflags", + "cbc", + "datasize", + "deflate", + "fax", + "globalcache", + "indexmap", + "istring", + "itertools", + "jpeg-decoder", + "libflate", + "log", + "md5", + "once_cell", + "pdf_derive", + "sha2", + "snafu", + "stringprep", + "weezl", +] + +[[package]] +name = "pdf-fuzz" +version = "0.0.0" +dependencies = [ + "libfuzzer-sys", + "pdf", +] + +[[package]] +name = "pdf_derive" +version = "0.2.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "snafu" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tuple" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb9f6bd73479481158ba8ee3edf17aca93354623d13f02e96a2014fdbc1c37e" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a210d160f08b701c8721ba1c726c11662f877ea6b7094007e1ca9a1041945034" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "weezl" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] diff --git a/src-pdfrs/pdf/fuzz/Cargo.toml b/src-pdfrs/pdf/fuzz/Cargo.toml new file mode 100644 index 0000000..c4fba38 --- /dev/null +++ b/src-pdfrs/pdf/fuzz/Cargo.toml @@ -0,0 +1,26 @@ + +[package] +name = "pdf-fuzz" +version = "0.0.0" +authors = ["Automatically generated"] +publish = false +edition = "2018" + +[package.metadata] +cargo-fuzz = true + +[dependencies] +libfuzzer-sys = "0.4" + +[dependencies.pdf] +path = ".." 
+ +# Prevent this from interfering with workspaces +[workspace] +members = ["."] + +[[bin]] +name = "parse" +path = "fuzz_targets/parse.rs" +test = false +doc = false diff --git a/src-pdfrs/pdf/fuzz/fuzz_targets/parse.rs b/src-pdfrs/pdf/fuzz/fuzz_targets/parse.rs new file mode 100644 index 0000000..2b3d2fd --- /dev/null +++ b/src-pdfrs/pdf/fuzz/fuzz_targets/parse.rs @@ -0,0 +1,14 @@ +#![no_main] +use libfuzzer_sys::fuzz_target; + +fn harness(data: &[u8]) { + if let Ok(file) = pdf::file::FileOptions::cached().load(data) { + for idx in 0..file.num_pages() { + let _ = file.get_page(idx); + } + } +} + +fuzz_target!(|data: &[u8]| { + let _ = harness(data); +}); \ No newline at end of file diff --git a/src-pdfrs/pdf/src/any.rs b/src-pdfrs/pdf/src/any.rs new file mode 100644 index 0000000..4d1349c --- /dev/null +++ b/src-pdfrs/pdf/src/any.rs @@ -0,0 +1,121 @@ +use std::any::TypeId; +use std::rc::Rc; +use std::sync::Arc; +use datasize::DataSize; +use crate::object::{Object}; +use crate::error::{Result, PdfError}; + +pub trait AnyObject { + fn type_name(&self) -> &'static str; + fn type_id(&self) -> TypeId; + fn size(&self) -> usize; +} + +#[repr(transparent)] +pub struct NoSize(T); +impl AnyObject for NoSize { + fn size(&self) -> usize { + 0 + } + fn type_id(&self) -> TypeId { + TypeId::of::() + } + fn type_name(&self) -> &'static str { + std::any::type_name::() + } +} + +#[repr(transparent)] +pub struct WithSize(T); +impl AnyObject for WithSize { + fn size(&self) -> usize { + datasize::data_size(&self.0) + } + fn type_id(&self) -> TypeId { + TypeId::of::() + } + fn type_name(&self) -> &'static str { + std::any::type_name::() + } +} + +#[derive(DataSize)] +pub struct Any(Rc); + +impl Any { + pub fn downcast(self) -> Result> + where T: AnyObject + 'static + { + if TypeId::of::() == self.0.type_id() { + unsafe { + let raw: *const dyn AnyObject = Rc::into_raw(self.0); + Ok(Rc::from_raw(raw as *const T)) + } + } else { + Err(type_mismatch::(self.0.type_name())) + } + } + pub fn new(rc: Rc) -> Any + where WithSize: AnyObject, T: 'static + { + Any(unsafe { + std::mem::transmute::, Rc>>(rc) + } as _) + } + pub fn new_without_size(rc: Rc) -> Any + where NoSize: AnyObject, T: 'static + { + Any(unsafe { + std::mem::transmute::, Rc>>(rc) + } as _) + } + pub fn type_name(&self) -> &'static str { + self.0.type_name() + } +} + +#[derive(Clone, DataSize)] +pub struct AnySync(Arc); + +#[cfg(feature="cache")] +impl globalcache::ValueSize for AnySync { + #[inline] + fn size(&self) -> usize { + self.0.size() + } +} + +impl AnySync { + pub fn downcast(self) -> Result> + where T: 'static + { + if TypeId::of::() == self.0.type_id() { + unsafe { + let raw: *const (dyn AnyObject+Sync+Send) = Arc::into_raw(self.0); + Ok(Arc::from_raw(raw as *const T)) + } + } else { + Err(type_mismatch::(self.0.type_name())) + } + } + pub fn new(arc: Arc) -> AnySync + where WithSize: AnyObject, T: Sync + Send + 'static + { + AnySync(unsafe { + std::mem::transmute::, Arc>>(arc) + } as _) + } + pub fn new_without_size(arc: Arc) -> AnySync + where NoSize: AnyObject, T: Sync + Send + 'static + { + AnySync(unsafe { + std::mem::transmute::, Arc>>(arc) + } as _) + } + pub fn type_name(&self) -> &'static str { + self.0.type_name() + } +} +fn type_mismatch(name: &str) -> PdfError { + PdfError::Other { msg: format!("expected {}, found {}", std::any::type_name::(), name) } +} diff --git a/src-pdfrs/pdf/src/backend.rs b/src-pdfrs/pdf/src/backend.rs new file mode 100644 index 0000000..df5b059 --- /dev/null +++ b/src-pdfrs/pdf/src/backend.rs @@ -0,0 
+1,179 @@ +use crate::error::*; +use crate::parser::Lexer; +use crate::parser::read_xref_and_trailer_at; +use crate::xref::XRefTable; +use crate::primitive::Dictionary; +use crate::object::*; +use std::ops::Deref; + +use std::ops::{ + RangeFull, + RangeFrom, + RangeTo, + Range, +}; + +pub const MAX_ID: u32 = 1_000_000; + +pub trait Backend: Sized { + fn read(&self, range: T) -> Result<&[u8]>; + //fn write(&mut self, range: T) -> Result<&mut [u8]>; + fn len(&self) -> usize; + fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns the offset of the beginning of the file, i.e., where the `%PDF-1.5` header is. + /// (currently only used internally!) + fn locate_start_offset(&self) -> Result { + // Read from the beginning of the file, and look for the header. + // Implementation note 13 in version 1.7 of the PDF reference says that Acrobat viewers + // expect the header to be within the first 1KB of the file, so we do the same here. + const HEADER: &[u8] = b"%PDF-"; + let buf = t!(self.read(..std::cmp::min(1024, self.len()))); + buf + .windows(HEADER.len()) + .position(|window| window == HEADER) + .ok_or_else(|| PdfError::Other{ msg: "file header is missing".to_string() }) + } + + /// Returns the value of startxref (currently only used internally!) + fn locate_xref_offset(&self) -> Result { + // locate the xref offset at the end of the file + // `\nPOS\n%%EOF` where POS is the position encoded as base 10 integer. + // u64::MAX has 20 digits + \n\n(2) + %%EOF(5) = 27 bytes max. + + let mut lexer = Lexer::new(t!(self.read(..))); + lexer.set_pos_from_end(0); + t!(lexer.seek_substr_back(b"startxref")); + t!(lexer.next()).to::() + } + + /// Used internally by File, but could also be useful for applications that want to look at the raw PDF objects. + fn read_xref_table_and_trailer(&self, start_offset: usize, resolve: &impl Resolve) -> Result<(XRefTable, Dictionary)> { + let xref_offset = t!(self.locate_xref_offset()); + let pos = t!(start_offset.checked_add(xref_offset).ok_or(PdfError::Invalid)); + if pos >= self.len() { + bail!("XRef offset outside file bounds"); + } + + let mut lexer = Lexer::with_offset(t!(self.read(pos ..)), pos); + + let (xref_sections, trailer) = t!(read_xref_and_trailer_at(&mut lexer, resolve)); + + let highest_id = t!(trailer.get("Size") + .ok_or_else(|| PdfError::MissingEntry {field: "Size".into(), typ: "XRefTable"})? 
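+        // The trailer's /Size entry is the total number of cross-reference entries
+        // (one more than the highest object number in use); it is range-checked
+        // against MAX_ID below before the xref table is sized.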
+ .as_u32()); + + if highest_id > MAX_ID { + bail!("too many objects"); + } + let mut refs = XRefTable::new(highest_id as ObjNr); + for section in xref_sections { + refs.add_entries_from(section)?; + } + + let mut prev_trailer = { + match trailer.get("Prev") { + Some(p) => Some(t!(p.as_usize())), + None => None + } + }; + trace!("READ XREF AND TABLE"); + let mut seen = vec![]; + while let Some(prev_xref_offset) = prev_trailer { + if seen.contains(&prev_xref_offset) { + bail!("xref offsets loop"); + } + seen.push(prev_xref_offset); + + let pos = t!(start_offset.checked_add(prev_xref_offset).ok_or(PdfError::Invalid)); + let mut lexer = Lexer::with_offset(t!(self.read(pos..)), pos); + let (xref_sections, trailer) = t!(read_xref_and_trailer_at(&mut lexer, resolve)); + + for section in xref_sections { + refs.add_entries_from(section)?; + } + + prev_trailer = { + match trailer.get("Prev") { + Some(p) => { + let prev = t!(p.as_usize()); + Some(prev) + } + None => None + } + }; + } + Ok((refs, trailer)) + } +} + + +impl Backend for T where T: Deref { //+ DerefMut { + fn read(&self, range: R) -> Result<&[u8]> { + let r = t!(range.to_range(self.len())); + Ok(&self[r]) + } + /* + fn write(&mut self, range: R) -> Result<&mut [u8]> { + let r = range.to_range(self.len())?; + Ok(&mut self[r]) + } + */ + fn len(&self) -> usize { + (**self).len() + } +} + +/// `IndexRange` is implemented by Rust's built-in range types, produced +/// by range syntax like `..`, `a..`, `..b` or `c..d`. +pub trait IndexRange +{ + /// Start index (inclusive) + fn start(&self) -> Option; + + /// End index (exclusive) + fn end(&self) -> Option; + + /// `len`: the size of whatever container that is being indexed + fn to_range(&self, len: usize) -> Result> { + match (self.start(), self.end()) { + (None, None) => Ok(0 .. len), + (Some(start), None) if start <= len => Ok(start .. len), + (None, Some(end)) if end <= len => Ok(0 .. end), + (Some(start), Some(end)) if start <= end && end <= len => Ok(start .. 
end), + _ => Err(PdfError::ContentReadPastBoundary) + } + } +} + + +impl IndexRange for RangeFull { + #[inline] + fn start(&self) -> Option { None } + #[inline] + fn end(&self) -> Option { None } + +} + +impl IndexRange for RangeFrom { + #[inline] + fn start(&self) -> Option { Some(self.start) } + #[inline] + fn end(&self) -> Option { None } +} + +impl IndexRange for RangeTo { + #[inline] + fn start(&self) -> Option { None } + #[inline] + fn end(&self) -> Option { Some(self.end) } +} + +impl IndexRange for Range { + #[inline] + fn start(&self) -> Option { Some(self.start) } + #[inline] + fn end(&self) -> Option { Some(self.end) } +} diff --git a/src-pdfrs/pdf/src/build.rs b/src-pdfrs/pdf/src/build.rs new file mode 100644 index 0000000..99ffbfd --- /dev/null +++ b/src-pdfrs/pdf/src/build.rs @@ -0,0 +1,406 @@ +use std::collections::HashMap; +use std::collections::HashSet; +use std::ops::Range; +use std::sync::Arc; + +use datasize::DataSize; + +use crate::PdfError; +use crate::any::AnySync; +use crate::enc::StreamFilter; +use crate::file::Cache; +use crate::file::FileOptions; +use crate::file::Log; +use crate::file::Storage; +use crate::file::Trailer; +use crate::object::*; +use crate::content::*; +use crate::error::Result; +use crate::parser::ParseFlags; +use crate::primitive::Dictionary; +use crate::primitive::Primitive; + +#[derive(Default)] +pub struct PageBuilder { + pub ops: Vec, + pub media_box: Option, + pub crop_box: Option, + pub trim_box: Option, + pub resources: Resources, + pub rotate: i32, + pub metadata: Option, + pub lgi: Option, + pub vp: Option, + pub other: Dictionary, +} +impl PageBuilder { + pub fn from_content(content: Content, resolve: &impl Resolve) -> Result { + Ok(PageBuilder { + ops: content.operations(resolve)?, + .. PageBuilder::default() + }) + } + pub fn from_page(page: &Page, resolve: &impl Resolve) -> Result { + Ok(PageBuilder { + ops: page.contents.as_ref().map(|c| c.operations(resolve)).transpose()?.unwrap_or_default(), + media_box: Some(page.media_box()?), + crop_box: Some(page.crop_box()?), + trim_box: page.trim_box, + resources: (**page.resources()?.data()).clone(), + rotate: page.rotate, + metadata: page.metadata.clone(), + lgi: page.lgi.clone(), + vp: page.vp.clone(), + other: page.other.clone(), + }) + } + pub fn clone_page(page: &Page, cloner: &mut impl Cloner) -> Result { + let old_resources = &**page.resources()?.data(); + + let mut resources = Resources::default(); + let ops = page.contents.as_ref() + .map(|content| content.operations(cloner)).transpose()? + .map(|ops| { + ops.into_iter().map(|op| -> Result { + deep_clone_op(&op, cloner, old_resources, &mut resources) + }).collect() + }) + .transpose()? 
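+            // Each operator is deep-cloned via `deep_clone_op`, which also copies the
+            // graphics states, fonts and XObjects it references from the old resources
+            // into the fresh `resources` built above.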
+ .unwrap_or_default(); + + Ok(PageBuilder { + ops, + media_box: Some(page.media_box()?), + crop_box: Some(page.crop_box()?), + trim_box: page.trim_box, + resources, + rotate: page.rotate, + metadata: page.metadata.deep_clone(cloner)?, + lgi: page.lgi.deep_clone(cloner)?, + vp: page.vp.deep_clone(cloner)?, + other: page.other.deep_clone(cloner)?, + }) + } + pub fn size(&mut self, width: f32, height: f32) { + self.media_box = Some(Rectangle { + top: 0., + left: 0., + bottom: height, + right: width, + }); + } +} + +pub struct CatalogBuilder { + pages: Vec +} +impl CatalogBuilder { + pub fn from_pages(pages: Vec) -> CatalogBuilder { + CatalogBuilder { + pages + } + } + pub fn build(self, update: &mut impl Updater) -> Result { + let kids_promise: Vec<_> = self.pages.iter() + .map(|_page| update.promise::()) + .collect(); + let kids: Vec<_> = kids_promise.iter() + .map(|p| Ref::new(p.get_inner())) + .collect(); + + let tree = PagesRc::create(PageTree { + parent: None, + count: kids.len() as _, + kids, + resources: None, + media_box: None, + crop_box: None + }, update)?; + + for (page, promise) in self.pages.into_iter().zip(kids_promise) { + let content = Content::from_ops(page.ops); + let resources = update.create(page.resources)?.into(); + let page = Page { + parent: tree.clone(), + contents: Some(content), + media_box: page.media_box, + crop_box: page.crop_box, + trim_box: page.trim_box, + resources: Some(resources), + rotate: page.rotate, + metadata: page.metadata, + lgi: page.lgi, + vp: page.vp, + other: page.other, + annotations: Default::default(), + }; + update.fulfill(promise, PagesNode::Leaf(page))?; + } + + Ok(Catalog { + version: Some("1.7".into()), + pages: tree, + names: None, + dests: None, + metadata: None, + outlines: None, + struct_tree_root: None, + forms: None, + page_labels: None, + }) + } +} + +pub struct PdfBuilder { + pub storage: Storage, SC, OC, L>, + pub info: Option, + pub id: Option<[String; 2]>, + +} +impl PdfBuilder +where + SC: Cache>>, + OC: Cache, Arc>>, + L: Log, +{ + pub fn new(fileoptions: FileOptions<'_, SC, OC, L>) -> Self { + let storage = fileoptions.storage(); + PdfBuilder { + storage, + info: None, + id: None + } + } + pub fn info(mut self, info: InfoDict) -> Self { + self.info = Some(info); + self + } + pub fn id(mut self, a: String, b: String) -> Self { + self.id = Some([a, b]); + self + } + pub fn build(mut self, catalog: CatalogBuilder) -> Result> { + let catalog = catalog.build(&mut self.storage)?; + let info = self.info.take(); + let mut trailer = Trailer { + root: self.storage.create(catalog)?, + encrypt_dict: None, + size: 0, + id: vec!["foo".into(), "bar".into()], + info_dict: info.map(|info| self.storage.create(info)).transpose()?, + prev_trailer_pos: None, + other: Dictionary::new(), + }; + self.storage.save(&mut trailer)?; + Ok(self.storage.into_inner()) + } +} +pub struct Importer<'a, R, U> { + resolver: R, + map: HashMap, + updater: &'a mut U, + rcrefs: HashMap, + // ptr of old -> (old, new) + shared: HashMap, +} + +pub struct ImporterMap { + resolver: R, + map: HashMap, +} + +impl<'a, R, U> Importer<'a, R, U> { + pub fn new(resolver: R, updater: &'a mut U) -> Self { + Importer { + resolver, + updater, + map: Default::default(), + rcrefs: Default::default(), + shared: Default::default(), + } + } +} +impl<'a, R: Resolve, U> Importer<'a, R, U> { + pub fn finish(self) -> ImporterMap { + ImporterMap { resolver: self.resolver, map: self.map } + } +} +impl ImporterMap { + fn compare_dict(&self, a_dict: &Dictionary, b_dict: &Dictionary, 
new_resolve: &impl Resolve) -> Result { + let mut same = true; + let mut b_unvisited: HashSet<_> = b_dict.keys().collect(); + for (a_key, a_val) in a_dict.iter() { + if let Some(b_val) = b_dict.get(a_key) { + if !self.compare_prim(a_val, b_val, new_resolve)? { + println!("value for key {a_key} mismatch."); + same = false; + } + b_unvisited.remove(a_key); + } else { + println!("missing key {a_key} in b."); + same = false; + } + } + for b_key in b_unvisited.iter() { + println!("missing key {b_key} in a."); + } + Ok(same && !b_unvisited.is_empty()) + } + fn compare_prim(&self, a: &Primitive, b: &Primitive, new_resolve: &impl Resolve) -> Result { + match (a, b) { + (Primitive::Array(a_parts), Primitive::Array(b_parts)) => { + if a_parts.len() != b_parts.len() { + dbg!(a_parts, b_parts); + println!("different length {} vs. {}", a_parts.len(), b_parts.len()); + println!("a = {a_parts:?}"); + println!("b = {b_parts:?}"); + return Ok(false); + } + for (a, b) in a_parts.iter().zip(b_parts.iter()) { + if !self.compare_prim(a, b, new_resolve)? { + return Ok(false); + } + } + Ok(true) + } + (Primitive::Dictionary(a_dict), Primitive::Dictionary(b_dict)) => { + self.compare_dict(a_dict, b_dict, new_resolve) + } + (Primitive::Reference(r1), Primitive::Reference(r2)) => { + match self.map.get(&r1) { + Some(r) if r == r2 => Ok(true), + _ => Ok(false) + } + } + (Primitive::Stream(a_s), Primitive::Stream(b_s)) => { + if !self.compare_dict(&a_s.info, &b_s.info, new_resolve)? { + println!("stream dicts differ"); + return Ok(false) + } + let a_data = a_s.raw_data(&self.resolver)?; + let b_data = b_s.raw_data(new_resolve)?; + if a_data != b_data { + println!("data differs."); + return Ok(false) + } + Ok(true) + } + (Primitive::Integer(a), Primitive::Number(b)) => Ok(*a as f32 == *b), + (Primitive::Number(a), Primitive::Integer(b)) => Ok(*a == *b as f32), + (Primitive::Reference(a_ref), b) => { + let a = self.resolver.resolve(*a_ref)?; + self.compare_prim(&a, b, new_resolve) + } + (a, Primitive::Reference(b_ref)) => { + let b = new_resolve.resolve(*b_ref)?; + self.compare_prim(a, &b, new_resolve) + } + (ref a, ref b) => { + if a == b { + Ok(true) + } else { + println!("{a:?} != {b:?}"); + Ok(false) + } + } + } + } + pub fn verify(&self, new_resolve: &impl Resolve) -> Result { + let mut same = true; + for (&old_ref, &new_ref) in self.map.iter() { + let old = self.resolver.resolve(old_ref)?; + let new = new_resolve.resolve(new_ref)?; + + if !self.compare_prim(&old, &new, new_resolve)? 
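+            // Intended flow, sketched from the types in this module: build an
+            // `Importer::new(old_resolver, &mut updater)`, clone objects through its
+            // `Cloner` impl, then `finish()` and `verify(&new_resolve)` to confirm that
+            // every recorded old -> new reference resolves to structurally equal data.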
{ + same = false; + } + } + Ok(same) + } +} + +impl<'a, R: Resolve, U> Resolve for Importer<'a, R, U> { + fn get(&self, r: Ref) -> Result> { + self.resolver.get(r) + } + fn get_data_or_decode(&self, id: PlainRef, range: Range, filters: &[StreamFilter]) -> Result> { + self.resolver.get_data_or_decode(id, range, filters) + } + fn options(&self) -> &ParseOptions { + self.resolver.options() + } + fn resolve(&self, r: PlainRef) -> Result { + self.resolver.resolve(r) + } + fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, depth: usize) -> Result { + self.resolver.resolve_flags(r, flags, depth) + } + fn stream_data(&self, id: PlainRef, range: Range) -> Result> { + self.resolver.stream_data(id, range) + } +} +impl<'a, R, U: Updater> Updater for Importer<'a, R, U> { + fn create(&mut self, obj: T) -> Result> { + self.updater.create(obj) + } + fn fulfill(&mut self, promise: PromisedRef, obj: T) -> Result> { + self.updater.fulfill(promise, obj) + } + fn promise(&mut self) -> PromisedRef { + self.updater.promise() + } + fn update(&mut self, old: PlainRef, obj: T) -> Result> { + self.updater.update(old, obj) + } +} +impl<'a, R: Resolve, U: Updater> Cloner for Importer<'a, R, U> { + fn clone_ref(&mut self, old: Ref) -> Result> { + if let Some(&new_ref) = self.map.get(&old.get_inner()) { + return Ok(Ref::new(new_ref)); + } + let obj = self.resolver.get(old)?; + let clone = obj.deep_clone(self)?; + + let r = self.updater.create(clone)?; + self.map.insert(old.get_inner(), r.get_ref().get_inner()); + + Ok(r.get_ref()) + } + fn clone_plainref(&mut self, old: PlainRef) -> Result { + if let Some(&new_ref) = self.map.get(&old) { + return Ok(new_ref); + } + let obj = self.resolver.resolve(old)?; + let clone = obj.deep_clone(self)?; + + let new = self.updater.create(clone)? + .get_ref().get_inner(); + + self.map.insert(old, new); + + Ok(new) + } + fn clone_rcref(&mut self, old: &RcRef) -> Result> { + let old_ref = old.get_ref().get_inner(); + if let Some(&new_ref) = self.map.get(&old_ref) { + let arc = self.rcrefs.get(&new_ref).unwrap().clone().downcast()?; + return Ok(RcRef::new(new_ref, arc)); + } + + let new = old.data().deep_clone(self)?; + let new = self.updater.create::(new)?; + self.rcrefs.insert(new.get_ref().get_inner(), AnySync::new(new.data().clone())); + self.map.insert(old_ref, new.get_ref().get_inner()); + + Ok(new) + } + fn clone_shared(&mut self, old: &Shared) -> Result> { + let key = &**old as *const T as usize; + if let Some((old, new)) = self.shared.get(&key) { + return new.clone().downcast(); + } + let new = Shared::new(old.as_ref().deep_clone(self)?); + self.shared.insert(key, (AnySync::new_without_size(old.clone()), AnySync::new_without_size(new.clone()))); + Ok(new) + } +} \ No newline at end of file diff --git a/src-pdfrs/pdf/src/content.rs b/src-pdfrs/pdf/src/content.rs new file mode 100644 index 0000000..7acf38d --- /dev/null +++ b/src-pdfrs/pdf/src/content.rs @@ -0,0 +1,1339 @@ +/// PDF content streams. +use std::fmt::{self, Display}; +use std::cmp::Ordering; +use itertools::Itertools; +use istring::SmallString; +use datasize::DataSize; +use std::sync::Arc; + +use crate::error::*; +use crate::object::*; +use crate::parser::{Lexer, parse_with_lexer, ParseFlags}; +use crate::primitive::*; +use crate::enc::StreamFilter; +use crate as pdf; + +/// Represents a PDF content stream - a `Vec` of `Operator`s +#[derive(Debug, Clone, DataSize)] +pub struct Content { + /// The raw content stream parts. usually one, but could be any number. 
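+    /// `Content::operations` below concatenates the data of all parts and parses the
+    /// result into a flat list of `Op`s, which is usually what callers want.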
+ pub parts: Vec>, +} + +impl Content { + pub fn operations(&self, resolve: &impl Resolve) -> Result> { + let mut data = vec![]; + for part in self.parts.iter() { + data.extend_from_slice(&t!(part.data(resolve))); + } + parse_ops(&data, resolve) + } +} + +pub fn parse_ops(data: &[u8], resolve: &impl Resolve) -> Result> { + let mut ops = OpBuilder::new(); + ops.parse(data, resolve)?; + Ok(ops.ops) +} + +pub fn parse_raw(data: &[u8]) -> Result> { + let mut lexer = Lexer::new(data); + let mut result = vec![]; + while let Ok(next) = lexer.next() { + result.push(next.to_string()); + }; + Ok(result) +} + +macro_rules! names { + ($args:ident, $($x:ident),*) => ( + $( + let $x = name(&mut $args)?; + )* + ) +} +macro_rules! numbers { + ($args:ident, $($x:ident),*) => ( + $( + let $x = number(&mut $args)?; + )* + ) +} +macro_rules! points { + ($args:ident, $($point:ident),*) => ( + $( + let $point = point(&mut $args)?; + )* + ) +} +fn name(args: &mut impl Iterator) -> Result { + args.next().ok_or(PdfError::NoOpArg)?.into_name() +} +fn number(args: &mut impl Iterator) -> Result { + args.next().ok_or(PdfError::NoOpArg)?.as_number() +} +fn string(args: &mut impl Iterator) -> Result { + args.next().ok_or(PdfError::NoOpArg)?.into_string() +} +fn point(args: &mut impl Iterator) -> Result { + let x = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let y = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + Ok(Point { x, y }) +} +fn rect(args: &mut impl Iterator) -> Result { + let x = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let y = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let width = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let height = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + Ok(ViewRect { x, y, width, height }) +} +fn rgb(args: &mut impl Iterator) -> Result { + let red = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let green = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let blue = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + Ok(Rgb { red, green, blue }) +} +fn cmyk(args: &mut impl Iterator) -> Result { + let cyan = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let magenta = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let yellow = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + let key = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + Ok(Cmyk { cyan, magenta, yellow, key }) +} +fn matrix(args: &mut impl Iterator) -> Result { + Ok(Matrix { + a: number(args)?, + b: number(args)?, + c: number(args)?, + d: number(args)?, + e: number(args)?, + f: number(args)?, + }) +} +fn array(args: &mut impl Iterator) -> Result> { + match args.next() { + Some(Primitive::Array(arr)) => Ok(arr), + None => Ok(vec![]), + _ => Err(PdfError::NoOpArg) + } +} + +fn expand_abbr_name(name: SmallString, alt: &[(&str, &str)]) -> SmallString { + for &(p, r) in alt { + if name == p { + return r.into(); + } + } + name +} +fn expand_abbr(p: Primitive, alt: &[(&str, &str)]) -> Primitive { + match p { + Primitive::Name(name) => Primitive::Name(expand_abbr_name(name, alt)), + Primitive::Array(items) => Primitive::Array(items.into_iter().map(|p| expand_abbr(p, alt)).collect()), + p => p + } +} + +fn inline_image(lexer: &mut Lexer, resolve: &impl Resolve) -> Result> { + let mut dict = Dictionary::new(); + loop { + let backup_pos = lexer.get_pos(); + let obj = parse_with_lexer(lexer, &NoResolve, ParseFlags::ANY); + let key = match obj { + Ok(Primitive::Name(key)) => key, + Err(e) if e.is_eof() => return Err(e), + Err(_) => { + 
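+                // Could not parse another key (normally because the next token is the
+                // `ID` keyword ending the key/value list): rewind so that
+                // `next_expect("ID")` below can consume it.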
lexer.set_pos(backup_pos); + break; + } + Ok(_) => bail!("invalid key type") + }; + let key = expand_abbr_name(key, &[ + ("BPC", "BitsPerComponent"), + ("CS", "ColorSpace"), + ("D", "Decode"), + ("DP", "DecodeParms"), + ("F", "Filter"), + ("H", "Height"), + ("IM", "ImageMask"), + ("I", "Interpolate"), + ("W", "Width"), + ]); + let val = parse_with_lexer(lexer, &NoResolve, ParseFlags::ANY)?; + dict.insert(key, val); + } + lexer.next_expect("ID")?; + let data_start = lexer.get_pos() + 1; + + // find the end before try parsing. + if lexer.seek_substr("\nEI").is_none() { + bail!("inline image exceeds expected data range"); + } + let data_end = lexer.get_pos() - 3; + + // ugh + let bits_per_component = dict.get("BitsPerComponent").map(|p| p.as_integer()).transpose()?; + let color_space = dict.get("ColorSpace").map(|p| ColorSpace::from_primitive(expand_abbr(p.clone(), + &[ + ("G", "DeviceGray"), + ("RGB", "DeviceRGB"), + ("CMYK", "DeviceCMYK"), + ("I", "Indexed") + ] + ), resolve)).transpose()?; + let decode = dict.get("Decode").map(|p| Object::from_primitive(p.clone(), resolve)).transpose()?; + let decode_parms = dict.get("DecodeParms").map(|p| p.clone().resolve(resolve)?.into_dictionary()).transpose()?.unwrap_or_default(); + let filter = dict.remove("Filter").map(|p| expand_abbr(p, + &[ + ("AHx", "ASCIIHexDecode"), + ("A85", "ASCII85Decode"), + ("LZW", "LZWDecode"), + ("Fl", "FlateDecode"), + ("RL", "RunLengthDecode"), + ("CCF", "CCITTFaxDecode"), + ("DCT", "DCTDecode"), + ] + )); + let filters = match filter { + Some(Primitive::Array(parts)) => parts.into_iter() + .map(|p| p.as_name().and_then(|kind| StreamFilter::from_kind_and_params(kind, decode_parms.clone(), resolve))) + .collect::>()?, + Some(Primitive::Name(kind)) => vec![StreamFilter::from_kind_and_params(&kind, decode_parms, resolve)?], + None => vec![], + _ => bail!("invalid filter") + }; + + let height = dict.require("InlineImage", "Height")?.as_u32()?; + let image_mask = dict.get("ImageMask").map(|p| p.as_bool()).transpose()?.unwrap_or(false); + let intent = dict.remove("Intent").map(|p| RenderingIntent::from_primitive(p, &NoResolve)).transpose()?; + let interpolate = dict.get("Interpolate").map(|p| p.as_bool()).transpose()?.unwrap_or(false); + let width = dict.require("InlineImage", "Width")?.as_u32()?; + + let image_dict = ImageDict { + width, + height, + color_space, + bits_per_component, + intent, + image_mask, + mask: None, + decode, + interpolate, + struct_parent: None, + id: None, + smask: None, + other: dict, + }; + + let data = lexer.new_substr(data_start .. data_end).to_vec(); + + Ok(Arc::new(ImageXObject { inner: Stream::from_compressed(image_dict, data, filters) })) +} + +struct OpBuilder { + last: Point, + compability_section: bool, + ops: Vec +} +impl OpBuilder { + fn new() -> Self { + OpBuilder { + last: Point { x: 0., y: 0. }, + compability_section: false, + ops: Vec::new() + } + } + fn parse(&mut self, data: &[u8], resolve: &impl Resolve) -> Result<()> { + let mut lexer = Lexer::new(data); + let mut buffer = Vec::with_capacity(5); + + loop { + let backup_pos = lexer.get_pos(); + let obj = parse_with_lexer(&mut lexer, resolve, ParseFlags::ANY); + match obj { + Ok(obj) => { + // Operand + buffer.push(obj) + } + Err(e) => { + if e.is_eof() { + break; + } + // It's not an object/operand - treat it as an operator. 
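+                    // Content streams use postfix syntax: operands are buffered until an
+                    // operator consumes them, e.g. `1 0 0 1 50 50 cm` pushes six numbers
+                    // that the `cm` arm of `add` turns into a single Op::Transform.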
+ lexer.set_pos(backup_pos); + let op = t!(lexer.next()); + let operator = t!(op.as_str(), op); + match self.add(operator, buffer.drain(..), &mut lexer, resolve) { + Ok(()) => {}, + Err(e) if resolve.options().allow_invalid_ops => { + warn!("OP Err: {:?}", e); + }, + Err(e) => return Err(e), + } + } + } + match lexer.get_pos().cmp(&data.len()) { + Ordering::Greater => err!(PdfError::ContentReadPastBoundary), + Ordering::Less => (), + Ordering::Equal => break + } + } + Ok(()) + } + fn add(&mut self, op: &str, mut args: impl Iterator, lexer: &mut Lexer, resolve: &impl Resolve) -> Result<()> { + use Winding::*; + + let ops = &mut self.ops; + let mut push = move |op| ops.push(op); + + match op { + "b" => { + push(Op::Close); + push(Op::FillAndStroke { winding: NonZero }); + }, + "B" => push(Op::FillAndStroke { winding: NonZero }), + "b*" => { + push(Op::Close); + push(Op::FillAndStroke { winding: EvenOdd }); + } + "B*" => push(Op::FillAndStroke { winding: EvenOdd }), + "BDC" => push(Op::BeginMarkedContent { + tag: name(&mut args)?, + properties: Some(args.next().ok_or(PdfError::NoOpArg)?) + }), + "BI" => push(Op::InlineImage { image: inline_image(lexer, resolve)? }), + "BMC" => push(Op::BeginMarkedContent { + tag: name(&mut args)?, + properties: None + }), + "BT" => push(Op::BeginText), + "BX" => self.compability_section = true, + "c" => { + points!(args, c1, c2, p); + push(Op::CurveTo { c1, c2, p }); + self.last = p; + } + "cm" => { + numbers!(args, a, b, c, d, e, f); + push(Op::Transform { matrix: Matrix { a, b, c, d, e, f }}); + } + "CS" => { + names!(args, name); + push(Op::StrokeColorSpace { name }); + } + "cs" => { + names!(args, name); + push(Op::FillColorSpace { name }); + } + "d" => { + let p = args.next().ok_or(PdfError::NoOpArg)?; + let pattern = p.as_array()?.iter().map(|p| p.as_number()).collect::, PdfError>>()?; + let phase = args.next().ok_or(PdfError::NoOpArg)?.as_number()?; + push(Op::Dash { pattern, phase }); + } + "d0" => {} + "d1" => {} + "Do" | "Do0" => { + names!(args, name); + push(Op::XObject { name }); + } + "DP" => push(Op::MarkedContentPoint { + tag: name(&mut args)?, + properties: Some(args.next().ok_or(PdfError::NoOpArg)?) + }), + "EI" => bail!("Parse Error. Unexpected 'EI'"), + "EMC" => push(Op::EndMarkedContent), + "ET" => push(Op::EndText), + "EX" => self.compability_section = false, + "f" | + "F" => push(Op::Fill { winding: NonZero }), + "f*" => push(Op::Fill { winding: EvenOdd }), + "G" => push(Op::StrokeColor { color: Color::Gray(number(&mut args)?) }), + "g" => push(Op::FillColor { color: Color::Gray(number(&mut args)?) }), + "gs" => push(Op::GraphicsState { name: name(&mut args)? }), + "h" => push(Op::Close), + "i" => push(Op::Flatness { tolerance: number(&mut args)? }), + "ID" => bail!("Parse Error. 
Unexpected 'ID'"), + "j" => { + let n = args.next().ok_or(PdfError::NoOpArg)?.as_integer()?; + let join = match n { + 0 => LineJoin::Miter, + 1 => LineJoin::Round, + 2 => LineJoin::Bevel, + _ => bail!("invalid line join {}", n) + }; + push(Op::LineJoin { join }); + } + "J" => { + let n = args.next().ok_or(PdfError::NoOpArg)?.as_integer()?; + let cap = match n { + 0 => LineCap::Butt, + 1 => LineCap::Round, + 2 => LineCap::Square, + _ => bail!("invalid line cap {}", n) + }; + push(Op::LineCap { cap }); + } + "K" => { + let color = Color::Cmyk(cmyk(&mut args)?); + push(Op::StrokeColor { color }); + } + "k" => { + let color = Color::Cmyk(cmyk(&mut args)?); + push(Op::FillColor { color }); + } + "l" => { + let p = point(&mut args)?; + push(Op::LineTo { p }); + self.last = p; + } + "m" => { + let p = point(&mut args)?; + push(Op::MoveTo { p }); + self.last = p; + } + "M" => push(Op::MiterLimit { limit: number(&mut args)? }), + "MP" => push(Op::MarkedContentPoint { tag: name(&mut args)?, properties: None }), + "n" => push(Op::EndPath), + "q" => push(Op::Save), + "Q" => push(Op::Restore), + "re" => push(Op::Rect { rect: rect(&mut args)? }), + "RG" => push(Op::StrokeColor { color: Color::Rgb(rgb(&mut args)?) }), + "rg" => push(Op::FillColor { color: Color::Rgb(rgb(&mut args)?) }), + "ri" => { + let s = name(&mut args)?; + let intent = RenderingIntent::from_str(&s) + .ok_or_else(|| PdfError::Other { msg: format!("invalid rendering intent {}", s) })?; + push(Op::RenderingIntent { intent }); + }, + "s" => { + push(Op::Close); + push(Op::Stroke); + } + "S" => push(Op::Stroke), + "SC" | "SCN" => { + push(Op::StrokeColor { color: Color::Other(args.collect()) }); + } + "sc" | "scn" => { + push(Op::FillColor { color: Color::Other(args.collect()) }); + } + "sh" => { + + } + "T*" => push(Op::TextNewline), + "Tc" => push(Op::CharSpacing { char_space: number(&mut args)? }), + "Td" => push(Op::MoveTextPosition { translation: point(&mut args)? }), + "TD" => { + let translation = point(&mut args)?; + push(Op::Leading { leading: -translation.y }); + push(Op::MoveTextPosition { translation }); + } + "Tf" => push(Op::TextFont { name: name(&mut args)?, size: number(&mut args)? }), + "Tj" => push(Op::TextDraw { text: string(&mut args)? }), + "TJ" => { + let mut result = Vec::::new(); + + for spacing_or_text in array(&mut args)?.into_iter() { + let spacing_or_text = match spacing_or_text { + Primitive::Integer(i) => TextDrawAdjusted::Spacing(i as f32), + Primitive::Number(f) => TextDrawAdjusted::Spacing(f), + Primitive::String(text) => TextDrawAdjusted::Text(text), + p => bail!("invalid primitive in TJ operator: {:?}", p) + }; + + result.push(spacing_or_text); + } + + push(Op::TextDrawAdjusted { array: result }) + } + "TL" => push(Op::Leading { leading: number(&mut args)? }), + "Tm" => push(Op::SetTextMatrix { matrix: matrix(&mut args)? }), + "Tr" => { + use TextMode::*; + + let n = args.next().ok_or(PdfError::NoOpArg)?.as_integer()?; + let mode = match n { + 0 => Fill, + 1 => Stroke, + 2 => FillThenStroke, + 3 => Invisible, + 4 => FillAndClip, + 5 => StrokeAndClip, + _ => { + bail!("Invalid text render mode: {}", n); + } + }; + push(Op::TextRenderMode { mode }); + } + "Ts" => push(Op::TextRise { rise: number(&mut args)? }), + "Tw" => push(Op::WordSpacing { word_space: number(&mut args)? }), + "Tz" => push(Op::TextScaling { horiz_scale: number(&mut args)? }), + "v" => { + points!(args, c2, p); + push(Op::CurveTo { c1: self.last, c2, p }); + self.last = p; + } + "w" => push(Op::LineWidth { width: number(&mut args)? 
}), + "W" => push(Op::Clip { winding: NonZero }), + "W*" => push(Op::Clip { winding: EvenOdd }), + "y" => { + points!(args, c1, p); + push(Op::CurveTo { c1, c2: p, p }); + self.last = p; + } + "'" => { + push(Op::TextNewline); + push(Op::TextDraw { text: string(&mut args)? }); + } + "\"" => { + push(Op::WordSpacing { word_space: number(&mut args)? }); + push(Op::CharSpacing { char_space: number(&mut args)? }); + push(Op::TextNewline); + push(Op::TextDraw { text: string(&mut args)? }); + } + o if !self.compability_section => { + bail!("invalid operator {}", o) + }, + _ => {} + } + Ok(()) + } +} + +impl Object for Content { + /// Convert primitive to Self + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + type ContentStream = Stream<()>; + let mut parts: Vec = vec![]; + + match p { + Primitive::Array(arr) => { + for p in arr { + let part = t!(ContentStream::from_primitive(p, resolve)); + parts.push(part); + } + } + Primitive::Reference(r) => return Self::from_primitive(t!(resolve.resolve(r)), resolve), + p => { + let part = t!(ContentStream::from_primitive(p, resolve)); + parts.push(part); + } + } + + Ok(Content { parts }) + } +} + +#[derive(Debug, DataSize, DeepClone, Clone)] +pub struct FormXObject { + pub stream: Stream, +} +impl FormXObject { + pub fn dict(&self) -> &FormDict { + &self.stream.info.info + } + pub fn operations(&self, resolve: &impl Resolve) -> Result> { + let mut ops = OpBuilder::new(); + let data = self.stream.data(resolve)?; + t!(ops.parse(&data, resolve)); + Ok(ops.ops) + } +} +impl Object for FormXObject { + /// Convert primitive to Self + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let stream = t!(Stream::::from_primitive(p, resolve)); + Ok(FormXObject { + stream, + }) + } +} +impl ObjectWrite for FormXObject { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let mut stream = self.stream.to_pdf_stream(update)?; + stream.info.insert("Subtype", Name::from("Form")); + Ok(stream.into()) + } +} + +macro_rules! 
write_ln_indented { + ($f:expr, $marked_depth:expr, $q_depth:expr, $t_depth:expr, $($arg:tt)*) => {{ + let mut data = Vec::new(); + write!(data, "{}{}", " ".repeat(2 * $marked_depth as usize + 4 * $q_depth as usize + 2 * $t_depth as usize), format!($($arg)*))?; + let string = String::from_utf8(data)?; + $f.push(string); + Ok(()) as Result<(), PdfError> + }} +} +fn format_text(text: &PdfString) -> Result { + let mut data = Vec::new(); + text.serialize(&mut data)?; + String::from_utf8(data).map_err(|e| PdfError::from(e)) +} + +pub fn display_ops(mut ops: &[Op]) -> Result> { + use std::io::Write; + + let mut data: Vec = Vec::new(); + let mut current_point = None; + let f = &mut data; + let mut q_depth: u32 = 0; + let mut marked_depth: u32 = 0; + let mut t_depth: u32 = 0; + + while ops.len() > 0 { + let mut advance = 1; + match ops[0] { + Op::BeginMarkedContent { ref tag, properties: Some(ref name) } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} BDC", tag, name)?; + marked_depth += 1; + } + Op::BeginMarkedContent { ref tag, properties: None } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} BMC", tag)?; + marked_depth += 1; + } + Op::MarkedContentPoint { ref tag, properties: Some(ref name) } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} DP", tag, tag)?; + } + Op::MarkedContentPoint { ref tag, properties: None } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} MP", tag)?; + } + Op::EndMarkedContent => { + marked_depth -= 1; + write_ln_indented!(f, marked_depth, q_depth, t_depth, "EMC")?; + }, + Op::Close => match ops.get(1) { + Some(Op::Stroke) => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "s")?; + advance += 1; + } + Some(Op::FillAndStroke { winding: Winding::NonZero }) => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "b")?; + advance += 1; + } + Some(Op::FillAndStroke { winding: Winding::EvenOdd }) => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "b*")?; + advance += 1; + } + _ => write_ln_indented!(f, marked_depth, q_depth, t_depth, "h")?, + } + Op::MoveTo { p } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} m", p)?; + current_point = Some(p); + } + Op::LineTo { p } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} l", p)?; + current_point = Some(p); + }, + Op::CurveTo { c1, c2, p } => { + if Some(c1) == current_point { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} v", c2, p)?; + } else if c2 == p { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} y", c1, p)?; + } else { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} {} c", c1, c2, p)?; + } + current_point = Some(p); + }, + Op::Rect { rect } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} re", rect)?, + Op::EndPath => write_ln_indented!(f, marked_depth, q_depth, t_depth, "n")?, + Op::Stroke => write_ln_indented!(f, marked_depth, q_depth, t_depth, "S")?, + Op::FillAndStroke { winding: Winding::NonZero } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "B")?, + Op::FillAndStroke { winding: Winding::EvenOdd } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "B*")?, + Op::Fill { winding: Winding::NonZero } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "f")?, + Op::Fill { winding: Winding::EvenOdd } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "f*")?, + Op::Shade { ref name } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} sh", name)?, + Op::Clip { winding: Winding::NonZero } => 
write_ln_indented!(f, marked_depth, q_depth, t_depth, "W")?, + Op::Clip { winding: Winding::EvenOdd } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "W*")?, + Op::Save => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "q")?; + q_depth += 1; + }, + Op::Restore => { + q_depth = q_depth.saturating_sub(1); + write_ln_indented!(f, marked_depth, q_depth, t_depth, "Q")?; + }, + Op::Transform { matrix } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} cm", matrix)?, + Op::LineWidth { width } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} w", width)?, + Op::Dash { ref pattern, phase } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "[{}] {} d", pattern.iter().format(" "), phase)?, + Op::LineJoin { join } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} j", join as u8)?, + Op::LineCap { cap } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} J", cap as u8)?, + Op::MiterLimit { limit } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} M", limit)?, + Op::Flatness { tolerance } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} i", tolerance)?, + Op::GraphicsState { ref name } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} gs", name)?, + Op::StrokeColor { color: Color::Gray(g) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} G", g)?, + Op::StrokeColor { color: Color::Rgb(rgb) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} RG", rgb)?, + Op::StrokeColor { color: Color::Cmyk(cmyk) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} K", cmyk)?, + Op::StrokeColor { color: Color::Other(ref args) } => { + let args_str = args.iter().map(|p| format!("{}", p)).collect::>().join(" "); + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{}{} SCN", args_str, " ")?; + } + Op::FillColor { color: Color::Gray(g) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} g", g)?, + Op::FillColor { color: Color::Rgb(rgb) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} rg", rgb)?, + Op::FillColor { color: Color::Cmyk(cmyk) } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} k", cmyk)?, + Op::FillColor { color: Color::Other(ref args) } => { + let args_str = args.iter().map(|p| format!("{}", p)).collect::>().join(" "); + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{}{} scn", args_str, " ")?; + } + Op::FillColorSpace { ref name } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} cs", name)?, + Op::StrokeColorSpace { ref name } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} CS", name)?, + Op::RenderingIntent { intent } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} ri", intent.to_str())?, + Op::BeginText => {write_ln_indented!(f, marked_depth, q_depth, t_depth, "BT")?; t_depth = t_depth.saturating_add(1)}, + Op::EndText => {t_depth = t_depth.saturating_sub(1); write_ln_indented!(f, marked_depth, q_depth, t_depth, "ET")?;}, + Op::CharSpacing { char_space } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tc", char_space)?, + Op::WordSpacing { word_space } => { + if let [Op::CharSpacing { char_space }, Op::TextNewline, Op::TextDraw { ref text }, ..] = ops[1..] 
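+                    // Peephole: `add` above expands the `"` operator into WordSpacing,
+                    // CharSpacing, TextNewline and TextDraw, so that exact sequence is
+                    // folded back into one `"` line here.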
{ + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} {:} \"", word_space, char_space, format_text(text)?)?; + advance += 3; + } else { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tw", word_space)?; + } + } + Op::TextScaling { horiz_scale } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tz", horiz_scale)?, + Op::Leading { leading } => match ops[1..] { + [Op::MoveTextPosition { translation }, ..] if leading == -translation.x => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} TD", translation.x, translation.y)?; + advance += 1; + } + _ => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} TL", leading)?, + } + Op::TextFont { ref name, ref size } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} Tf", name, size)?, + Op::TextRenderMode { mode } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tr", mode as u8)?, + Op::TextRise { rise } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Ts", rise)?, + Op::MoveTextPosition { translation } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} {} Td", translation.x, translation.y)?, + Op::SetTextMatrix { matrix } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tm", matrix)?, + Op::TextNewline => { + if let [Op::TextDraw { ref text }, ..] = ops[1..] { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} '", format_text(text)?)?; + advance += 1; + } else { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "T*")?; + } + }, + Op::TextDraw { ref text } => { + write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Tj", format_text(text)?)?; + }, + Op::TextDrawAdjusted { ref array } => { + let content = array.iter().enumerate() + .map(|(i, val)| match val { + TextDrawAdjusted::Spacing(s) => s.to_string(), + TextDrawAdjusted::Text(data) => format_text(data).unwrap_or(String::from("!!!")), + }) + .collect::>() + .join(" "); + write_ln_indented!(f, marked_depth, q_depth, t_depth, "[{}] TJ", content)?; + }, + Op::InlineImage { image: _ } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "Inline image is not implemented yet!")?, + Op::XObject { ref name } => write_ln_indented!(f, marked_depth, q_depth, t_depth, "{} Do", name)?, + } + ops = &ops[advance..]; + } + Ok(data) +} + + +#[allow(clippy::float_cmp)] // TODO +pub fn serialize_ops(mut ops: &[Op]) -> Result> { + use std::io::Write; + + let mut data = Vec::new(); + let mut current_point = None; + let f = &mut data; + + while ops.len() > 0 { + let mut advance = 1; + match ops[0] { + Op::BeginMarkedContent { ref tag, properties: Some(ref name) } => { + serialize_name(tag, f)?; + write!(f, " ")?; + name.serialize(f)?; + writeln!(f, " BDC")?; + } + Op::BeginMarkedContent { ref tag, properties: None } => { + serialize_name(tag, f)?; + writeln!(f, " BMC")?; + } + Op::MarkedContentPoint { ref tag, properties: Some(ref name) } => { + serialize_name(tag, f)?; + write!(f, " ")?; + name.serialize(f)?; + writeln!(f, " DP")?; + } + Op::MarkedContentPoint { ref tag, properties: None } => { + serialize_name(tag, f)?; + writeln!(f, " MP")?; + } + Op::EndMarkedContent => writeln!(f, "EMC")?, + Op::Close => match ops.get(1) { + Some(Op::Stroke) => { + writeln!(f, "s")?; + advance += 1; + } + Some(Op::FillAndStroke { winding: Winding::NonZero }) => { + writeln!(f, "b")?; + advance += 1; + } + Some(Op::FillAndStroke { winding: Winding::EvenOdd }) => { + writeln!(f, "b*")?; + advance += 1; + } + _ => writeln!(f, "h")?, + } + Op::MoveTo { p } => { + writeln!(f, "{} m", p)?; + 
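+                // Track the current point so CurveTo can be emitted in the shorter `v`
+                // form (c1 equal to the current point) or `y` form (c2 equal to the end
+                // point) instead of the full `c` form.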
current_point = Some(p); + } + Op::LineTo { p } => { + writeln!(f, "{} l", p)?; + current_point = Some(p); + }, + Op::CurveTo { c1, c2, p } => { + if Some(c1) == current_point { + writeln!(f, "{} {} v", c2, p)?; + } else if c2 == p { + writeln!(f, "{} {} y", c1, p)?; + } else { + writeln!(f, "{} {} {} c", c1, c2, p)?; + } + current_point = Some(p); + }, + Op::Rect { rect } => writeln!(f, "{} re", rect)?, + Op::EndPath => writeln!(f, "n")?, + Op::Stroke => writeln!(f, "S")?, + Op::FillAndStroke { winding: Winding::NonZero } => writeln!(f, "B")?, + Op::FillAndStroke { winding: Winding::EvenOdd } => writeln!(f, "B*")?, + Op::Fill { winding: Winding::NonZero } => writeln!(f, "f")?, + Op::Fill { winding: Winding::EvenOdd } => writeln!(f, "f*")?, + Op::Shade { ref name } => { + serialize_name(name, f)?; + writeln!(f, " sh")?; + }, + Op::Clip { winding: Winding::NonZero } => writeln!(f, "W")?, + Op::Clip { winding: Winding::EvenOdd } => writeln!(f, "W*")?, + Op::Save => writeln!(f, "q")?, + Op::Restore => writeln!(f, "Q")?, + Op::Transform { matrix } => writeln!(f, "{} cm", matrix)?, + Op::LineWidth { width } => writeln!(f, "{} w", width)?, + Op::Dash { ref pattern, phase } => writeln!(f, "[{}] {} d", pattern.iter().format(" "), phase)?, + Op::LineJoin { join } => writeln!(f, "{} j", join as u8)?, + Op::LineCap { cap } => writeln!(f, "{} J", cap as u8)?, + Op::MiterLimit { limit } => writeln!(f, "{} M", limit)?, + Op::Flatness { tolerance } => writeln!(f, "{} i", tolerance)?, + Op::GraphicsState { ref name } => { + serialize_name(name, f)?; + writeln!(f, " gs")?; + }, + Op::StrokeColor { color: Color::Gray(g) } => writeln!(f, "{} G", g)?, + Op::StrokeColor { color: Color::Rgb(rgb) } => writeln!(f, "{} RG", rgb)?, + Op::StrokeColor { color: Color::Cmyk(cmyk) } => writeln!(f, "{} K", cmyk)?, + Op::StrokeColor { color: Color::Other(ref args) } => { + for p in args { + p.serialize(f)?; + write!(f, " ")?; + } + writeln!(f, "SCN")?; + } + Op::FillColor { color: Color::Gray(g) } => writeln!(f, "{} g", g)?, + Op::FillColor { color: Color::Rgb(rgb) } => writeln!(f, "{} rg", rgb)?, + Op::FillColor { color: Color::Cmyk(cmyk) } => writeln!(f, "{} k", cmyk)?, + Op::FillColor { color: Color::Other(ref args) } => { + for p in args { + p.serialize(f)?; + write!(f, " ")?; + } + writeln!(f, "scn")?; + } + Op::FillColorSpace { ref name } => { + serialize_name(name, f)?; + writeln!(f, " cs")?; + }, + Op::StrokeColorSpace { ref name } => { + serialize_name(name, f)?; + writeln!(f, " CS")?; + }, + + Op::RenderingIntent { intent } => writeln!(f, "{} ri", intent.to_str())?, + Op::BeginText => writeln!(f, "BT")?, + Op::EndText => writeln!(f, "ET")?, + Op::CharSpacing { char_space } => writeln!(f, "{} Tc", char_space)?, + Op::WordSpacing { word_space } => { + if let [ + Op::CharSpacing { char_space }, + Op::TextNewline, + Op::TextDraw { ref text }, + .. + ] = ops[1..] { + write!(f, "{} {} ", word_space, char_space)?; + text.serialize(f)?; + writeln!(f, " \"")?; + advance += 3; + } else { + writeln!(f, "{} Tw", word_space)?; + } + } + Op::TextScaling { horiz_scale } => writeln!(f, "{} Tz", horiz_scale)?, + Op::Leading { leading } => match ops[1..] { + [Op::MoveTextPosition { translation }, ..] 
if leading == -translation.x => { + writeln!(f, "{} {} TD", translation.x, translation.y)?; + advance += 1; + } + _ => { + writeln!(f, "{} TL", leading)?; + } + } + Op::TextFont { ref name, ref size } => { + serialize_name(name, f)?; + writeln!(f, " {} Tf", size)?; + }, + Op::TextRenderMode { mode } => writeln!(f, "{} Tr", mode as u8)?, + Op::TextRise { rise } => writeln!(f, "{} Ts", rise)?, + Op::MoveTextPosition { translation } => writeln!(f, "{} {} Td", translation.x, translation.y)?, + Op::SetTextMatrix { matrix } => writeln!(f, "{} Tm", matrix)?, + Op::TextNewline => { + if let [Op::TextDraw { ref text }, ..] = ops[1..] { + text.serialize(f)?; + writeln!(f, " '")?; + advance += 1; + } else { + writeln!(f, "T*")?; + } + }, + Op::TextDraw { ref text } => { + text.serialize(f)?; + writeln!(f, " Tj")?; + }, + Op::TextDrawAdjusted { ref array } => { + write!(f, "[")?; + for (i, val) in array.iter().enumerate() { + if i > 0 { + write!(f, " ")?; + } + match val { + TextDrawAdjusted::Spacing(s) => write!(f, "{s}")?, + TextDrawAdjusted::Text(data) => data.serialize(f)?, + } + } + writeln!(f, "] TJ")?; + }, + Op::InlineImage { image: _ } => unimplemented!(), + Op::XObject { ref name } => { + serialize_name(name, f)?; + writeln!(f, " Do")?; + }, + } + ops = &ops[advance..]; + } + Ok(data) +} + +impl Content { + pub fn from_ops(operations: Vec) -> Self { + let data = serialize_ops(&operations).unwrap(); + Content { + parts: vec![Stream::new((), data)] + } + } +} + +impl ObjectWrite for Content { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + if self.parts.len() == 1 { + let obj = self.parts[0].to_primitive(update)?; + update.create(obj)?.to_primitive(update) + } else { + self.parts.to_primitive(update) + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub enum Winding { + EvenOdd, + NonZero +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub enum LineCap { + Butt = 0, + Round = 1, + Square = 2, +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub enum LineJoin { + Miter = 0, + Round = 1, + Bevel = 2, +} + +#[cfg(feature = "euclid")] +pub struct PdfSpace(); + +#[derive(Debug, Copy, Clone, PartialEq, Default, DataSize)] +#[repr(C, align(8))] +pub struct Point { + pub x: f32, + pub y: f32 +} +impl Display for Point { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {}", self.x, self.y) + } +} +#[cfg(feature = "euclid")] +impl Into> for Point { + fn into(self) -> euclid::Point2D { + let Point { x, y } = self; + + euclid::Point2D::new(x, y) + } +} +#[cfg(feature = "euclid")] +impl From> for Point { + fn from(from: euclid::Point2D) -> Self { + let euclid::Point2D { x, y, .. } = from; + + Point { x, y } + } +} +#[cfg(feature = "euclid")] +impl Into> for Point { + fn into(self) -> euclid::Vector2D { + let Point { x, y } = self; + + euclid::Vector2D::new(x, y) + } +} +#[cfg(feature = "euclid")] +impl From> for Point { + fn from(from: euclid::Vector2D) -> Self { + let euclid::Vector2D { x, y, .. } = from; + + Point { x, y } + } +} + +/// ISO 32000-2:2020(E) Table 58 Pg 186 - ViewRect +/// Path construction operators - {x y width height re} +/// Append a rectangle to the current path as a complete +/// subpath, with lower-left corner (x, y) and dimensions +/// width and height in user space. 
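+/// For example, `ViewRect { x: 3.0, y: 4.0, width: 10.0, height: 20.0 }` displays as
+/// `3 4 10 20`, matching the operand order consumed by the `re` operator.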
+#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +#[repr(C, align(8))] +pub struct ViewRect { + pub x: f32, + pub y: f32, + pub width: f32, + pub height: f32, +} + +#[deprecated] +pub type Rect = ViewRect; + +impl Display for ViewRect { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {} {} {}", self.x, self.y, self.width, self.height) + } +} +#[cfg(feature = "euclid")] +impl Into> for ViewRect { + fn into(self) -> euclid::Box2D { + let ViewRect { x, y, width, height } = self; + + assert!(width > 0.0); + assert!(height > 0.0); + + euclid::Box2D::new(euclid::Point2D::new(x, y), euclid::Point2D::new(x + width, y + height)) + } +} +#[cfg(feature = "euclid")] +impl From> for ViewRect { + fn from(from: euclid::Box2D) -> Self { + let euclid::Box2D { min: euclid::Point2D { x, y, .. }, max: euclid::Point2D { x: x2, y: y2, .. }, .. } = from; + + assert!(x < x2); + assert!(y < y2); + + ViewRect { + x, y, width: x2 - x, height: y2 - y + } + } +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize, DeepClone)] +#[repr(C, align(8))] +pub struct Matrix { + pub a: f32, + pub b: f32, + pub c: f32, + pub d: f32, + pub e: f32, + pub f: f32, +} +impl Display for Matrix { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {} {} {} {} {}", self.a, self.b, self.c, self.d, self.e, self.f) + } +} +impl Default for Matrix { + fn default() -> Self { + Matrix { + a: 1.0, + b: 0.0, + c: 0.0, + d: 1.0, + e: 0.0, + f: 0.0, + } + } +} +impl Object for Matrix { + fn from_primitive(p: Primitive, _resolve: &impl Resolve) -> Result { + matrix(&mut p.into_array()?.into_iter()) + } +} +impl ObjectWrite for Matrix { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let Matrix { a, b, c, d, e, f } = *self; + Primitive::array::([a, b, c, d, e, f].iter(), update) + } +} +#[cfg(feature = "euclid")] +impl Into> for Matrix { + fn into(self) -> euclid::Transform2D { + let Matrix { a, b, c, d, e, f} = self; + + euclid::Transform2D::new(a, b, c, d, e, f) + } +} +#[cfg(feature = "euclid")] +impl From> for Matrix { + fn from(from: euclid::Transform2D) -> Self { + let euclid::Transform2D { m11: a, m12: b, m21: c, m22: d, m31: e, m32: f, .. 
} = from; + + Matrix { + a, b, c, d, e, f + } + } +} + +#[derive(Debug, Clone, DataSize)] +pub enum Color { + Gray(f32), + Rgb(Rgb), + Cmyk(Cmyk), + Other(Vec), +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub enum TextMode { + Fill, + Stroke, + FillThenStroke, + Invisible, + FillAndClip, + StrokeAndClip +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub struct Rgb { + pub red: f32, + pub green: f32, + pub blue: f32, +} +impl Display for Rgb { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {} {}", self.red, self.green, self.blue) + } +} + +#[derive(Debug, Copy, Clone, PartialEq, DataSize)] +pub struct Cmyk { + pub cyan: f32, + pub magenta: f32, + pub yellow: f32, + pub key: f32, +} +impl Display for Cmyk { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{} {} {} {}", self.cyan, self.magenta, self.yellow, self.key) + } +} + +#[derive(Debug, Clone, DataSize)] +pub enum TextDrawAdjusted { + Text(PdfString), + Spacing(f32), +} + +impl Display for TextDrawAdjusted { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Text(text) => write!(f, "{:?}", text), + Self::Spacing(spacing) => spacing.fmt(f), + } + } +} + +/// Graphics Operator +/// +/// See PDF32000 A.2 +#[derive(Debug, Clone, DataSize)] +pub enum Op { + /// Begin a marked comtent sequence + /// + /// Pairs with the following EndMarkedContent. + /// + /// generated by operators `BMC` and `BDC` + BeginMarkedContent { tag: Name, properties: Option }, + + /// End a marked content sequence. + /// + /// Pairs with the previous BeginMarkedContent. + /// + /// generated by operator `EMC` + EndMarkedContent, + + /// A marked content point. + /// + /// generated by operators `MP` and `DP`. + MarkedContentPoint { tag: Name, properties: Option }, + + + Close, + MoveTo { p: Point }, + LineTo { p: Point }, + CurveTo { c1: Point, c2: Point, p: Point }, + Rect { rect: ViewRect }, + EndPath, + + Stroke, + + /// Fill and Stroke operation + /// + /// generated by operators `b`, `B`, `b*`, `B*` + /// `close` indicates whether the path should be closed first + FillAndStroke { winding: Winding }, + + + Fill { winding: Winding }, + + /// Fill using the named shading pattern + /// + /// operator: `sh` + Shade { name: Name }, + + Clip { winding: Winding }, + + Save, + Restore, + + Transform { matrix: Matrix }, + + LineWidth { width: f32 }, + Dash { pattern: Vec, phase: f32 }, + LineJoin { join: LineJoin }, + LineCap { cap: LineCap }, + MiterLimit { limit: f32 }, + Flatness { tolerance: f32 }, + + GraphicsState { name: Name }, + + StrokeColor { color: Color }, + FillColor { color: Color }, + + FillColorSpace { name: Name }, + StrokeColorSpace { name: Name }, + + RenderingIntent { intent: RenderingIntent }, + + BeginText, + EndText, + + CharSpacing { char_space: f32 }, + WordSpacing { word_space: f32 }, + TextScaling { horiz_scale: f32 }, + Leading { leading: f32 }, + TextFont { name: Name, size: f32 }, + TextRenderMode { mode: TextMode }, + + /// `Ts` + TextRise { rise: f32 }, + + /// `Td`, `TD` + MoveTextPosition { translation: Point }, + + /// `Tm` + SetTextMatrix { matrix: Matrix }, + + /// `T*` + TextNewline, + + /// `Tj` + TextDraw { text: PdfString }, + + TextDrawAdjusted { array: Vec }, + + XObject { name: Name }, + + InlineImage { image: Arc }, +} + +pub fn deep_clone_op(op: &Op, cloner: &mut impl Cloner, old_resources: &Resources, resources: &mut Resources) -> Result { + match *op { + Op::GraphicsState { ref name } => { + if 
!resources.graphics_states.contains_key(name) { + if let Some(gs) = old_resources.graphics_states.get(name) { + resources.graphics_states.insert(name.clone(), gs.deep_clone(cloner)?); + } + } + Ok(Op::GraphicsState { name: name.clone() }) + } + Op::MarkedContentPoint { ref tag, ref properties } => { + Ok(Op::MarkedContentPoint { tag: tag.clone(), properties: properties.deep_clone(cloner)? }) + } + Op::BeginMarkedContent { ref tag, ref properties } => { + Ok(Op::BeginMarkedContent { tag: tag.clone(), properties: properties.deep_clone(cloner)? }) + } + Op::TextFont { ref name, size } => { + if !resources.fonts.contains_key(name) { + if let Some(f) = old_resources.fonts.get(name) { + resources.fonts.insert(name.clone(), f.deep_clone(cloner)?); + } + } + Ok(Op::TextFont { name: name.clone(), size }) + } + Op::XObject { ref name } => { + if !resources.xobjects.contains_key(name) { + if let Some(xo) = old_resources.xobjects.get(name) { + resources.xobjects.insert(name.clone(), xo.deep_clone(cloner)?); + } + } + Ok(Op::XObject { name: name.clone() }) + } + ref op => Ok(op.clone()) + } +} + + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_inline_image() { + let data = br###" +/W 768 +/H 150 +/BPC 1 +/IM true +/F [/A85 /Fl] +ID +Gb"0F_%"1Ö"#B1qiGGG^V6GZ#ZkijB5'RjB4S^5I61&$Ni:Xh=4S_9KYN;c9MUZPn/h,c]oCLUmg*Fo?0Hs0nQHp41KkO\Ls5+g0aoD*btT?l]lq0YAucfaoqHp4 +1KkO\Ls5+g0aoD*btT?l^#mD&ORf[0~> +EI +"###; + let mut lexer = Lexer::new(data); + assert!(inline_image(&mut lexer, &NoResolve).is_ok()); + } +} diff --git a/src-pdfrs/pdf/src/crypt.rs b/src-pdfrs/pdf/src/crypt.rs new file mode 100644 index 0000000..562f420 --- /dev/null +++ b/src-pdfrs/pdf/src/crypt.rs @@ -0,0 +1,695 @@ +/// PDF "cryptography" – This is why you don't write your own crypto. + +use crate as pdf; +use aes::cipher::generic_array::{sequence::Split, GenericArray}; +use aes::cipher::{BlockDecryptMut, BlockEncryptMut, KeyIvInit}; +use aes::cipher::block_padding::{NoPadding, Pkcs7}; +use sha2::{Digest, Sha256, Sha384, Sha512}; +use std::fmt; +use std::collections::HashMap; +use datasize::DataSize; +use crate::object::PlainRef; +use crate::primitive::{Dictionary, PdfString, Name}; +use crate::error::{PdfError, Result}; + +type Aes128CbcEnc = cbc::Encryptor; +type Aes128CbcDec = cbc::Decryptor; +type Aes256CbcDec = cbc::Decryptor; + +const PADDING: [u8; 32] = [ + 0x28, 0xBF, 0x4E, 0x5E, 0x4E, 0x75, 0x8A, 0x41, + 0x64, 0x00, 0x4E, 0x56, 0xFF, 0xFA, 0x01, 0x08, + 0x2E, 0x2E, 0x00, 0xB6, 0xD0, 0x68, 0x3E, 0x80, + 0x2F, 0x0C, 0xA9, 0xFE, 0x64, 0x53, 0x69, 0x7A +]; + +#[derive(Copy)] +pub struct Rc4 { + i: u8, + j: u8, + state: [u8; 256] +} + +impl Clone for Rc4 { fn clone(&self) -> Rc4 { *self } } + +impl Rc4 { + pub fn new(key: &[u8]) -> Rc4 { + assert!(!key.is_empty() && key.len() <= 256); + let mut rc4 = Rc4 { i: 0, j: 0, state: [0; 256] }; + for (i, x) in rc4.state.iter_mut().enumerate() { + *x = i as u8; + } + let mut j: u8 = 0; + for i in 0..256 { + j = j.wrapping_add(rc4.state[i]).wrapping_add(key[i % key.len()]); + rc4.state.swap(i, j as usize); + } + rc4 + } + fn next(&mut self) -> u8 { + self.i = self.i.wrapping_add(1); + self.j = self.j.wrapping_add(self.state[self.i as usize]); + self.state.swap(self.i as usize, self.j as usize); + self.state[(self.state[self.i as usize].wrapping_add(self.state[self.j as usize])) as usize] + } + pub fn encrypt(key: &[u8], data: &mut [u8]) { + let mut rc4 = Rc4::new(key); + for b in data.iter_mut() { + *b ^= rc4.next(); + } + } +} + +/// 7.6.1 Table 20 + 7.6.3.2 Table 21 
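+/// `V` selects the algorithm family (1-2: RC4 with a key derived per Algorithm 2;
+/// 4-6: named crypt filters, which may use AES), and `R` is the standard security
+/// handler revision that `Decoder::from_password` branches on.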
+#[derive(Object, Debug, Clone, DataSize)] +pub struct CryptDict { + #[pdf(key="O")] + o: PdfString, + + #[pdf(key="U")] + u: PdfString, + + #[pdf(key="R")] + r: u32, + + #[pdf(key="P")] + p: i32, + + #[pdf(key="V")] + v: i32, + + #[pdf(key="Length", default="40")] + bits: u32, + + #[pdf(key="CF")] + crypt_filters: HashMap, + + #[pdf(key="StmF")] + default_crypt_filter: Option, + + #[pdf(key="EncryptMetadata", default="true")] + encrypt_metadata: bool, + + #[pdf(key = "OE")] + oe: Option, + + #[pdf(key = "UE")] + ue: Option, + + #[pdf(other)] + _other: Dictionary +} + +#[derive(Object, Debug, Clone, Copy, DataSize)] +pub enum CryptMethod { + None, + V2, + AESV2, + AESV3, +} + +#[derive(Object, Debug, Clone, Copy, DataSize)] +pub enum AuthEvent { + DocOpen, + EFOpen +} + +#[derive(Object, Debug, Clone, DataSize)] +#[pdf(Type="CryptFilter?")] +pub struct CryptFilter { + #[pdf(key="CFM", default="CryptMethod::None")] + pub method: CryptMethod, + + #[pdf(key="AuthEvent", default="AuthEvent::DocOpen")] + pub auth_event: AuthEvent, + + #[pdf(key="Length")] + pub length: Option, + + #[pdf(other)] + _other: Dictionary +} + +pub struct Decoder { + key_size: usize, + key: Vec, // maximum length + method: CryptMethod, + /// A reference to the /Encrypt dictionary, if it is in an indirect + /// object. The strings in this dictionary are not encrypted, so + /// decryption must be skipped when accessing them. + pub(crate) encrypt_indirect_object: Option, + /// A reference to the /Metadata dictionary, if it is an indirect + /// object. If /EncryptMedata is set to false in the /Encrypt dictionary, + /// then the strings in the /Metadata dictionary are not encrypted, so + /// decryption must be skipped when accessing them. + pub(crate) metadata_indirect_object: Option, + /// Whether the metadata is encrypted, as indicated by /EncryptMetadata + /// in the /Encrypt dictionary. + encrypt_metadata: bool, +} +impl Decoder { + pub fn default(dict: &CryptDict, id: &[u8]) -> Result { + Decoder::from_password(dict, id, b"") + } + + fn key(&self) -> &[u8] { + &self.key[.. std::cmp::min(self.key_size, 16)] + } + + pub fn new(key: Vec, key_size: usize, method: CryptMethod, encrypt_metadata: bool) -> Decoder { + Decoder { + key_size, + key, + method, + encrypt_indirect_object: None, + metadata_indirect_object: None, + encrypt_metadata, + } + } + + pub fn from_password(dict: &CryptDict, id: &[u8], pass: &[u8]) -> Result { + fn compute_u_rev_2(key: &[u8]) -> Vec { + // algorithm 4 + let mut data = PADDING.to_vec(); + Rc4::encrypt(key, &mut data); + data + } + + fn check_password_rev_2(document_u: &[u8], key: &[u8]) -> bool { + compute_u_rev_2(key) == document_u + } + + fn compute_u_rev_3_4(id: &[u8], key: &[u8]) -> [u8; 16] { + // algorithm 5 + // a) we derived the key already. 
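+            // Note: Algorithm 5 only defines the first 16 bytes of /U; the remaining
+            // 16 bytes are arbitrary padding, which is why `check_password_rev_3_4`
+            // compares with `starts_with` rather than full equality.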
+ + // b) + let mut hash = md5::Context::new(); + hash.consume(PADDING); + + // c) + hash.consume(id); + + // d) + let mut data = *hash.compute(); + Rc4::encrypt(key, &mut data); + + // e) + for i in 1u8..=19 { + let mut key = key.to_owned(); + for b in &mut key { + *b ^= i; + } + Rc4::encrypt(&key, &mut data); + } + + // f) + data + } + + fn check_password_rev_3_4(document_u: &[u8], id: &[u8], key: &[u8]) -> bool { + document_u.starts_with(&compute_u_rev_3_4(id, key)) + } + + fn check_password_rc4(revision: u32, document_u: &[u8], id: &[u8], key: &[u8]) -> bool { + if revision == 2 { + check_password_rev_2(document_u, key) + } else { + check_password_rev_3_4(document_u, id, key) + } + } + + fn key_derivation_user_password_rc4( + revision: u32, + key_size: usize, + dict: &CryptDict, + id: &[u8], + pass: &[u8], + ) -> Vec { + let o = dict.o.as_bytes(); + let p = dict.p; + // 7.6.3.3 - Algorithm 2 + // a) and b) + let mut hash = md5::Context::new(); + if pass.len() < 32 { + hash.consume(pass); + hash.consume(&PADDING[..32 - pass.len()]); + } else { + hash.consume(&pass[..32]); + } + + // c) + hash.consume(o); + + // d) + hash.consume(p.to_le_bytes()); + + // e) + hash.consume(id); + + // f) + if revision >= 4 && !dict.encrypt_metadata { + hash.consume([0xff, 0xff, 0xff, 0xff]); + } + + // g) + let mut data = *hash.compute(); + + // h) + if revision >= 3 { + for _ in 0..50 { + data = *md5::compute(&data[..std::cmp::min(key_size, 16)]); + } + } + + let mut key = vec![0u8; key_size.max(16)]; + key[..16].copy_from_slice(&data); + key + } + + fn key_derivation_owner_password_rc4( + revision: u32, + key_size: usize, + pass: &[u8], + ) -> Result> { + if key_size > 16 { + bail!("key size > 16"); + } + + let mut hash = md5::Context::new(); + if pass.len() < 32 { + hash.consume(pass); + hash.consume(&PADDING[..32 - pass.len()]); + } else { + hash.consume(&pass[..32]); + } + + if revision >= 3 { + for _ in 0..50 { + let digest = *std::mem::replace(&mut hash, md5::Context::new()).compute(); + hash.consume(digest); + } + } + + let digest = &hash.compute()[..key_size]; + Ok(digest.to_vec()) + } + + let (key_bits, method) = match dict.v { + 1 => (40, CryptMethod::V2), + 2 => { + if dict.bits % 8 != 0 { + err!(other!("invalid key length {}", dict.bits)) + } else { + (dict.bits, CryptMethod::V2) + } + }, + 4 ..= 6 => { + let default = dict + .crypt_filters + .get(try_opt!(dict.default_crypt_filter.as_ref()).as_str()) + .ok_or_else(|| other!("missing crypt filter entry {:?}", dict.default_crypt_filter.as_ref()))?; + + match default.method { + CryptMethod::V2 | CryptMethod::AESV2 => ( + default.length.map(|n| 8 * n).unwrap_or(dict.bits), + default.method, + ), + CryptMethod::AESV3 if dict.v == 5 => ( + default.length.map(|n| 8 * n).unwrap_or(dict.bits), + default.method, + ), + m => err!(other!("unimplemented crypt method {:?}", m)), + } + } + v => err!(other!("unsupported V value {}", v)), + }; + let level = dict.r; + if !(2..=6).contains(&level) { + err!(other!("unsupported standard security handler revision {}", level)) + }; + if level <= 4 { + let key_size = key_bits as usize / 8; + let key = key_derivation_user_password_rc4(level, key_size, dict, id, pass); + + if check_password_rc4(level, dict.u.as_bytes(), id, &key[..std::cmp::min(key_size, 16)]) { + let decoder = Decoder::new(key, key_size, method, dict.encrypt_metadata); + Ok(decoder) + } else { + let password_wrap_key = key_derivation_owner_password_rc4(level, key_size, pass)?; + let mut data = dict.o.as_bytes().to_vec(); + let rounds = if level == 2 
{ 1u8 } else { 20u8 }; + for round in 0..rounds { + let mut round_key = password_wrap_key.clone(); + for byte in round_key.iter_mut() { + *byte ^= round; + } + Rc4::encrypt(&round_key, &mut data); + } + let unwrapped_user_password = data; + + let key = key_derivation_user_password_rc4( + level, + key_size, + dict, + id, + &unwrapped_user_password, + ); + + if check_password_rc4(level, dict.u.as_bytes(), id, &key[..key_size]) { + let decoder = Decoder::new(key, key_size, method, dict.encrypt_metadata); + Ok(decoder) + } else { + Err(PdfError::InvalidPassword) + } + } + } else if level == 5 || level == 6 { + let u = dict.u.as_bytes(); + if u.len() != 48 { + err!(format!( + "U in Encrypt dictionary should have a length of 48 bytes, not {}", + u.len(), + ) + .into()); + } + let user_hash = &u[0..32]; + let user_validation_salt = &u[32..40]; + let user_key_salt = &u[40..48]; + + let o = dict.o.as_bytes(); + if o.len() != 48 { + err!(format!( + "O in Encrypt dictionary should have a length of 48 bytes, not {}", + o.len(), + ) + .into()); + } + let owner_hash = &o[0..32]; + let owner_validation_salt = &o[32..40]; + let owner_key_salt = &o[40..48]; + + let password_unicode = + t!(String::from_utf8(pass.to_vec()).map_err(|_| PdfError::InvalidPassword)); + let password_prepped = + t!(stringprep::saslprep(&password_unicode).map_err(|_| PdfError::InvalidPassword)); + let mut password_encoded = password_prepped.as_bytes(); + + if password_encoded.len() > 127 { + password_encoded = &password_encoded[..127]; + } + + let ue = t!(dict.ue.as_ref().ok_or_else(|| PdfError::MissingEntry { + typ: "Encrypt", + field: "UE".into(), + })) + .as_bytes() + .to_vec(); + let oe = t!(dict.oe.as_ref().ok_or_else(|| PdfError::MissingEntry { + typ: "Encrypt", + field: "OE".into(), + })) + .as_bytes() + .to_vec(); + + let (intermediate_key, mut wrapped_key) = if level == 6 { + let user_hash_computed = + Self::revision_6_kdf(password_encoded, user_validation_salt, b""); + if user_hash_computed == user_hash { + ( + Self::revision_6_kdf(password_encoded, user_key_salt, b"").into(), + ue, + ) + } else { + let owner_hash_computed = + Self::revision_6_kdf(password_encoded, owner_validation_salt, u); + if owner_hash_computed == owner_hash { + ( + Self::revision_6_kdf(password_encoded, owner_key_salt, u).into(), + oe, + ) + } else { + err!(PdfError::InvalidPassword); + } + } + } else { + // level == 5 + + let mut user_check_hash = Sha256::new(); + user_check_hash.update(password_encoded); + user_check_hash.update(user_validation_salt); + let user_hash_computed = user_check_hash.finalize(); + #[allow(clippy::branches_sharing_code)] + if user_hash_computed.as_slice() == user_hash { + let mut intermediate_kdf_hash = Sha256::new(); + intermediate_kdf_hash.update(password_encoded); + intermediate_kdf_hash.update(user_key_salt); + (intermediate_kdf_hash.finalize(), ue) + } else { + let mut owner_check_hash = Sha256::new(); + owner_check_hash.update(password_encoded); + owner_check_hash.update(owner_validation_salt); + owner_check_hash.update(u); + let owner_hash_computed = owner_check_hash.finalize(); + if owner_hash_computed.as_slice() == owner_hash { + let mut intermediate_kdf_hash = Sha256::new(); + intermediate_kdf_hash.update(password_encoded); + intermediate_kdf_hash.update(owner_key_salt); + intermediate_kdf_hash.update(u); + (intermediate_kdf_hash.finalize(), oe) + } else { + err!(PdfError::InvalidPassword); + } + } + }; + + let zero_iv = GenericArray::from_slice(&[0u8; 16]); + let key_slice = 
t!(Aes256CbcDec::new(&intermediate_key, zero_iv) + .decrypt_padded_mut::(&mut wrapped_key) + .map_err(|_| PdfError::InvalidPassword)); + + let decoder = Decoder::new(key_slice.into(), 32, method, dict.encrypt_metadata); + Ok(decoder) + } else { + err!(format!("unsupported V value {}", level).into()) + } + } + + fn revision_6_kdf(password: &[u8], salt: &[u8], u: &[u8]) -> [u8; 32] { + let mut data = [0u8; (128 + 64 + 48) * 64]; + let mut data_total_len = 0; + + let mut sha256 = Sha256::new(); + let mut sha384 = Sha384::new(); + let mut sha512 = Sha512::new(); + + let mut input_sha256 = Sha256::new(); + input_sha256.update(password); + input_sha256.update(salt); + input_sha256.update(u); + let input = input_sha256.finalize(); + let (mut key, mut iv) = input.split(); + + let mut block = [0u8; 64]; + let mut block_size = 32; + (block[..block_size]).copy_from_slice(&input[..block_size]); + + let mut i = 0; + while i < 64 || i < data[data_total_len - 1] as usize + 32 { + let aes = Aes128CbcEnc::new(&key, &iv); + let data_repeat_len = password.len() + block_size + u.len(); + data[..password.len()].copy_from_slice(password); + data[password.len()..password.len() + block_size].copy_from_slice(&block[..block_size]); + data[password.len() + block_size..data_repeat_len].copy_from_slice(u); + for j in 1..64 { + data.copy_within(..data_repeat_len, j * data_repeat_len); + } + data_total_len = data_repeat_len * 64; + + // The plaintext length will always be a multiple of the block size, unwrap is okay + let encrypted = aes + .encrypt_padded_mut::(&mut data[..data_total_len], data_total_len) + .unwrap(); + + let sum: usize = encrypted[..16].iter().map(|byte| *byte as usize).sum(); + block_size = sum % 3 * 16 + 32; + match block_size { + 32 => { + sha256.update(encrypted); + (block[..block_size]).copy_from_slice(&sha256.finalize_reset()); + } + 48 => { + sha384.update(encrypted); + (block[..block_size]).copy_from_slice(&sha384.finalize_reset()); + } + 64 => { + sha512.update(encrypted); + (block[..block_size]).copy_from_slice(&sha512.finalize_reset()); + } + _ => unreachable!(), + } + + key.copy_from_slice(&block[..16]); + iv.copy_from_slice(&block[16..32]); + + i += 1; + } + let mut hash = [0u8; 32]; + hash.copy_from_slice(&block[..32]); + hash + } + + pub fn decrypt<'buf>(&self, id: PlainRef, data: &'buf mut [u8]) -> Result<&'buf [u8]> { + if self.encrypt_indirect_object == Some(id) { + // Strings inside the /Encrypt dictionary are not encrypted + return Ok(data); + } + + if !self.encrypt_metadata && self.metadata_indirect_object == Some(id) { + // Strings inside the /Metadata dictionary are not encrypted when /EncryptMetadata is + // false + return Ok(data); + } + + if data.is_empty() { + return Ok(data); + } + + // Algorithm 1 + // a) we have those already + + match self.method { + CryptMethod::None => unreachable!(), + CryptMethod::V2 => { + // b) + let mut key = [0; 16 + 5]; + let n = self.key().len(); + key[..n].copy_from_slice(self.key()); + key[n..n + 3].copy_from_slice(&id.id.to_le_bytes()[..3]); + key[n + 3..n + 5].copy_from_slice(&id.gen.to_le_bytes()[..2]); + + // c) + let key = *md5::compute(&key[..n + 5]); + + // d) + Rc4::encrypt(&key[..(n + 5).min(16)], data); + Ok(data) + } + CryptMethod::AESV2 => { + // b) + let mut key = [0; 32 + 5 + 4]; + let n = std::cmp::min(self.key_size, 16); + key[..n].copy_from_slice(self.key()); + key[n..n + 3].copy_from_slice(&id.id.to_le_bytes()[..3]); + key[n + 3..n + 5].copy_from_slice(&id.gen.to_le_bytes()[..2]); + key[n + 5..n + 9].copy_from_slice(b"sAlT"); 
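+                // NOTE: per ISO 32000-1 §7.6.2 (Algorithm 1), the per-object key for
+                // AESV2 is the MD5 of the file key, the 3 low-order bytes of the object
+                // number, the 2 low-order bytes of the generation number, and the
+                // constant "sAlT". Steps c) and d) below hash that buffer and truncate
+                // the digest to min(n + 5, 16) bytes; the first 16 bytes of the data
+                // then serve as the CBC IV for AES-128 decryption.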
+ + // c) + let key = *md5::compute(&key[..n + 9]); + + // d) + let key = &key[..(n + 5).min(16)]; + if data.len() < 16 { + return Err(PdfError::DecryptionFailure); + } + let (iv, ciphertext) = data.split_at_mut(16); + let cipher = + t!(Aes128CbcDec::new_from_slices(key, iv).map_err(|_| PdfError::DecryptionFailure)); + Ok(t!(cipher + .decrypt_padded_mut::(ciphertext) + .map_err(|_| PdfError::DecryptionFailure))) + } + CryptMethod::AESV3 => { + if data.len() < 16 { + return Err(PdfError::DecryptionFailure); + } + let (iv, ciphertext) = data.split_at_mut(16); + let cipher = + t!(Aes256CbcDec::new_from_slices(self.key(), iv).map_err(|_| PdfError::DecryptionFailure)); + Ok(t!(cipher + .decrypt_padded_mut::(ciphertext) + .map_err(|_| PdfError::DecryptionFailure))) + } + } + } +} +impl fmt::Debug for Decoder { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Decoder") + .field("key", &self.key()) + .field("method", &self.method) + .finish() + } +} + +#[cfg(test)] +mod tests { + #[test] + fn unencrypted_strings() { + let data_prefix = b"%PDF-1.5\n\ + 1 0 obj\n\ + << /Type /Catalog /Pages 2 0 R >>\n\ + endobj\n\ + 2 0 obj\n\ + << /Type /Pages /Kids [3 0 R] /Count 1 >>\n\ + endobj\n\ + 3 0 obj\n\ + << /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R >>\n\ + endobj\n\ + 4 0 obj\n\ + << /Length 0 >>\n\ + stream\n\ + endstream\n\ + endobj\n\ + 5 0 obj\n\ + <<\n\ + /V 4\n\ + /CF <<\n\ + /StdCF << /Type /CryptFilter /CFM /V2 >>\n\ + >>\n\ + /StmF /StdCF\n\ + /StrF /StdCF\n\ + /R 4\n\ + /O (owner pwd hash!!)\n\ + /U \n\ + /P -4\n\ + >>\n\ + endobj\n\ + xref\n\ + 1 5\n"; + let mut data = data_prefix.to_vec(); + for obj_nr in 1..=5 { + let needle = format!("\n{} 0 obj\n", obj_nr).into_bytes(); + let offset = data_prefix + .windows(needle.len()) + .position(|w| w == needle) + .unwrap() + + 1; + let mut line = format!("{:010} {:05} n\r\n", offset, 0).into_bytes(); + assert_eq!(line.len(), 20); + data.append(&mut line); + } + let trailer_snippet = b"trailer\n\ + <<\n\ + /Size 6\n\ + /Root 1 0 R\n\ + /Encrypt 5 0 R\n\ + /ID [ ]\n\ + >>\n\ + startxref\n"; + data.extend_from_slice(trailer_snippet); + let xref_offset = data_prefix + .windows("xref".len()) + .rposition(|w| w == b"xref") + .unwrap(); + data.append(&mut format!("{}\n%%EOF", xref_offset).into_bytes()); + + let file = crate::file::FileOptions::uncached().load(data).unwrap(); + + // PDF reference says strings in the encryption dictionary are "not + // encrypted by the usual methods." 
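+        // The /Encrypt dictionary is object 5 0 R here, and Decoder::decrypt()
+        // returns strings belonging to that indirect object unchanged, so the
+        // literal /O bytes written above must survive loading verbatim.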
+ assert_eq!( + file.trailer.encrypt_dict.unwrap().o.as_ref(), + b"owner pwd hash!!", + ); + } +} diff --git a/src-pdfrs/pdf/src/data/t01_lzw+base85.txt b/src-pdfrs/pdf/src/data/t01_lzw+base85.txt new file mode 100644 index 0000000..16c46f8 --- /dev/null +++ b/src-pdfrs/pdf/src/data/t01_lzw+base85.txt @@ -0,0 +1,12 @@ +J..)6T`?p&c!Jnl@ +RM]WM;jjH6Gnc75idkL5]+cPZKEBPWdR>FF(kj1_R%W_d +&/jS!;iuad7h?[L-F$+]]0A3Ck*$I0KZ?;<)CJtqi65Xb +Vc3\n5ua:Q/=0$W<#N3U;H,MQKqfg1?:lUpR;6oN[C2E4 +ZNr8Udn.'p+?#X+1>0Kuk$bCDF/(3fL5]Oq)^kJZ!C2H1 +'TO]Rl?Q:&'<5&iP!$Rq;BXRecDN[IJB`,)o8XJOSJ9sD +S]hQ;Rj@!ND)bD_q&C\g:inYC%)&u#:u,M6Bm%IY!Kb1+ +":aAa'S`ViJglLb8iG1p&i;eVoK&juJHs9%;Xomop"5KatWRT"JQ#qYuL, +JD?M$0QP)lKn06l1apKDC@\qJ4B!!(5m+j.7F790m(Vj8 +8l8Q:_CZ(Gm1%X\N1&u!FKHMB~> \ No newline at end of file diff --git a/src-pdfrs/pdf/src/data/t01_plain.txt b/src-pdfrs/pdf/src/data/t01_plain.txt new file mode 100644 index 0000000..e69de29 diff --git a/src-pdfrs/pdf/src/enc.rs b/src-pdfrs/pdf/src/enc.rs new file mode 100644 index 0000000..d976fa4 --- /dev/null +++ b/src-pdfrs/pdf/src/enc.rs @@ -0,0 +1,656 @@ +#![allow(clippy::many_single_char_names)] +#![allow(dead_code)] // TODO + +use itertools::Itertools; + +use crate as pdf; +use crate::error::*; +use crate::object::{Object, Resolve, Stream}; +use crate::primitive::{Primitive, Dictionary}; +use std::convert::{TryFrom, TryInto}; +use std::io::{Read, Write}; +use once_cell::sync::OnceCell; +use datasize::DataSize; + + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct LZWFlateParams { + #[pdf(key="Predictor", default="1")] + pub predictor: i32, + #[pdf(key="Colors", default="1")] + pub n_components: i32, + #[pdf(key="BitsPerComponent", default="8")] + pub bits_per_component: i32, + #[pdf(key="Columns", default="1")] + pub columns: i32, + #[pdf(key="EarlyChange", default="1")] + pub early_change: i32, +} +impl Default for LZWFlateParams { + fn default() -> LZWFlateParams { + LZWFlateParams { + predictor: 1, + n_components: 1, + bits_per_component: 8, + columns: 1, + early_change: 1 + } + } +} + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct DCTDecodeParams { + // TODO The default value of ColorTransform is 1 if the image has three components and 0 otherwise. + // 0: No transformation. + // 1: If the image has three color components, transform RGB values to YUV before encoding and from YUV to RGB after decoding. + // If the image has four components, transform CMYK values to YUVK before encoding and from YUVK to CMYK after decoding. + // This option is ignored if the image has one or two color components. 
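+    // Note: dct_decode() below passes the stream to jpeg_decoder without consulting
+    // this value (the parameter arrives as `_params`), so ColorTransform is
+    // currently parsed but not applied.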
+ #[pdf(key="ColorTransform")] + pub color_transform: Option, +} + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct CCITTFaxDecodeParams { + #[pdf(key="K", default="0")] + pub k: i32, + + #[pdf(key="EndOfLine", default="false")] + pub end_of_line: bool, + + #[pdf(key="EncodedByteAlign", default="false")] + pub encoded_byte_align: bool, + + #[pdf(key="Columns", default="1728")] + pub columns: u32, + + #[pdf(key="Rows", default="0")] + pub rows: u32, + + #[pdf(key="EndOfBlock", default="true")] + pub end_of_block: bool, + + #[pdf(key="BlackIs1", default="false")] + pub black_is_1: bool, + + #[pdf(key="DamagedRowsBeforeError", default="0")] + pub damaged_rows_before_error: u32, +} + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct JBIG2DecodeParams { + #[pdf(key="JBIG2Globals")] + pub globals: Option> +} +#[derive(Debug, Clone, DataSize, DeepClone)] +pub enum StreamFilter { + ASCIIHexDecode, + ASCII85Decode, + LZWDecode (LZWFlateParams), + FlateDecode (LZWFlateParams), + JPXDecode, //Jpeg2k + DCTDecode (DCTDecodeParams), + CCITTFaxDecode (CCITTFaxDecodeParams), + JBIG2Decode(JBIG2DecodeParams), + Crypt, + RunLengthDecode +} +impl StreamFilter { + pub fn from_kind_and_params(kind: &str, params: Dictionary, r: &impl Resolve) -> Result { + let params = Primitive::Dictionary (params); + Ok( + match kind { + "ASCIIHexDecode" => StreamFilter::ASCIIHexDecode, + "ASCII85Decode" => StreamFilter::ASCII85Decode, + "LZWDecode" => StreamFilter::LZWDecode (LZWFlateParams::from_primitive(params, r)?), + "FlateDecode" => StreamFilter::FlateDecode (LZWFlateParams::from_primitive(params, r)?), + "JPXDecode" => StreamFilter::JPXDecode, + "DCTDecode" => StreamFilter::DCTDecode (DCTDecodeParams::from_primitive(params, r)?), + "CCITTFaxDecode" => StreamFilter::CCITTFaxDecode (CCITTFaxDecodeParams::from_primitive(params, r)?), + "JBIG2Decode" => StreamFilter::JBIG2Decode(JBIG2DecodeParams::from_primitive(params, r)?), + "Crypt" => StreamFilter::Crypt, + "RunLengthDecode" => StreamFilter::RunLengthDecode, + ty => bail!("Unrecognized filter type {:?}", ty), + } + ) + } +} + +#[inline] +pub fn decode_nibble(c: u8) -> Option { + match c { + n @ b'0' ..= b'9' => Some(n - b'0'), + a @ b'a' ..= b'h' => Some(a - b'a' + 0xa), + a @ b'A' ..= b'H' => Some(a - b'A' + 0xA), + _ => None + } +} + +#[inline] +fn encode_nibble(c: u8) -> u8 { + match c { + 0 ..= 9 => b'0'+ c, + 10 ..= 15 => b'a' - 10 + c, + _ => unreachable!() + } +} + + +pub fn decode_hex(data: &[u8]) -> Result> { + let mut out = Vec::with_capacity(data.len() / 2); + let pairs = data.iter().cloned() + .take_while(|&b| b != b'>') + .filter(|&b| !matches!(b, 0 | 9 | 10 | 12 | 13 | 32)) + .tuples(); + for (i, (high, low)) in pairs.enumerate() { + if let (Some(low), Some(high)) = (decode_nibble(low), decode_nibble(high)) { + out.push(high << 4 | low); + } else { + return Err(PdfError::HexDecode {pos: i * 2, bytes: [high, low]}) + } + } + Ok(out) +} +pub fn encode_hex(data: &[u8]) -> Vec { + let mut buf = Vec::with_capacity(data.len() * 2); + for &b in data { + buf.push(encode_nibble(b >> 4)); + buf.push(encode_nibble(b & 0xf)); + } + buf +} + +#[inline] +fn sym_85(byte: u8) -> Option { + match byte { + b @ 0x21 ..= 0x75 => Some(b - 0x21), + _ => None + } +} + +fn word_85([a, b, c, d, e]: [u8; 5]) -> Option<[u8; 4]> { + fn s(b: u8) -> Option { sym_85(b).map(|n| n as u64) } + let (a, b, c, d, e) = (s(a)?, s(b)?, s(c)?, s(d)?, s(e)?); + let q = (((a * 85 + b) * 85 + c) * 85 + d) * 85 + e; + // 85^5 > 256^4, the 
result might not fit in an u32. + let r = u32::try_from(q).ok()?; + Some(r.to_be_bytes()) +} + +pub fn decode_85(data: &[u8]) -> Result> { + let mut out = Vec::with_capacity((data.len() + 4) / 5 * 4); + + let mut stream = data.iter().cloned() + .filter(|&b| !matches!(b, b' ' | b'\n' | b'\r' | b'\t')); + + let mut symbols = stream.by_ref() + .take_while(|&b| b != b'~'); + + let (tail_len, tail) = loop { + match symbols.next() { + Some(b'z') => out.extend_from_slice(&[0; 4]), + Some(a) => { + let (b, c, d, e) = match (symbols.next(), symbols.next(), symbols.next(), symbols.next()) { + (Some(b), Some(c), Some(d), Some(e)) => (b, c, d, e), + (None, _, _, _) => break (1, [a, b'u', b'u', b'u', b'u']), + (Some(b), None, _, _) => break (2, [a, b, b'u', b'u', b'u']), + (Some(b), Some(c), None, _) => break (3, [a, b, c, b'u', b'u']), + (Some(b), Some(c), Some(d), None) => break (4, [a, b, c, d, b'u']), + }; + out.extend_from_slice(&word_85([a, b, c, d, e]).ok_or(PdfError::Ascii85TailError)?); + } + None => break (0, [b'u'; 5]) + } + }; + + if tail_len > 0 { + let last = word_85(tail).ok_or(PdfError::Ascii85TailError)?; + out.extend_from_slice(&last[.. tail_len-1]); + } + + match (stream.next(), stream.next()) { + (Some(b'>'), None) => Ok(out), + _ => Err(PdfError::Ascii85TailError) + } +} + +#[inline] +fn divmod(n: u32, m: u32) -> (u32, u32) { + (n / m, n % m) +} + +#[inline] +fn a85(n: u32) -> u8 { + n as u8 + 0x21 +} + +#[inline] +fn base85_chunk(c: [u8; 4]) -> [u8; 5] { + let n = u32::from_be_bytes(c); + let (n, e) = divmod(n, 85); + let (n, d) = divmod(n, 85); + let (n, c) = divmod(n, 85); + let (a, b) = divmod(n, 85); + + [a85(a), a85(b), a85(c), a85(d), a85(e)] +} + +fn encode_85(data: &[u8]) -> Vec { + let mut buf = Vec::with_capacity((data.len() / 4) * 5 + 10); + let mut chunks = data.chunks_exact(4); + for chunk in chunks.by_ref() { + let c: [u8; 4] = chunk.try_into().unwrap(); + if c == [0; 4] { + buf.push(b'z'); + } else { + buf.extend_from_slice(&base85_chunk(c)); + } + } + + let r = chunks.remainder(); + if r.len() > 0 { + let mut c = [0; 4]; + c[.. r.len()].copy_from_slice(r); + let out = base85_chunk(c); + buf.extend_from_slice(&out[.. 
r.len() + 1]); + } + buf.extend_from_slice(b"~>"); + buf +} + +fn inflate_bytes_zlib(data: &[u8]) -> Result> { + use libflate::zlib::Decoder; + let mut decoder = Decoder::new(data)?; + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded)?; + Ok(decoded) +} + +fn inflate_bytes(data: &[u8]) -> Result> { + use libflate::deflate::Decoder; + let mut decoder = Decoder::new(data); + let mut decoded = Vec::new(); + decoder.read_to_end(&mut decoded)?; + Ok(decoded) +} + +pub fn flate_decode(data: &[u8], params: &LZWFlateParams) -> Result> { + + let predictor = params.predictor as usize; + let n_components = params.n_components as usize; + let columns = params.columns as usize; + let stride = columns * n_components; + + + // First flate decode + let decoded = { + if let Ok(data) = inflate_bytes_zlib(data) { + data + } else if let Ok(data) = inflate_bytes(data) { + data + } else { + dump_data(data); + bail!("can't inflate"); + } + }; + // Then unfilter (PNG) + // For this, take the old out as input, and write output to out + + if predictor > 10 { + let inp = decoded; // input buffer + let rows = inp.len() / (stride+1); + + // output buffer + let mut out = vec![0; rows * stride]; + + // Apply inverse predictor + let null_vec = vec![0; stride]; + + let mut in_off = 0; // offset into input buffer + + let mut out_off = 0; // offset into output buffer + let mut last_out_off = 0; // last offset to output buffer + + while in_off + stride < inp.len() { + let predictor = PredictorType::from_u8(inp[in_off])?; + in_off += 1; // +1 because the first byte on each row is predictor + + let row_in = &inp[in_off .. in_off + stride]; + let (prev_row, row_out) = if out_off == 0 { + (&null_vec[..], &mut out[out_off .. out_off+stride]) + } else { + let (prev, curr) = out.split_at_mut(out_off); + (&prev[last_out_off ..], &mut curr[.. 
stride]) + }; + unfilter(predictor, n_components, prev_row, row_in, row_out); + + last_out_off = out_off; + + in_off += stride; + out_off += stride; + } + Ok(out) + } else { + Ok(decoded) + } +} +fn flate_encode(data: &[u8]) -> Vec { + use libflate::deflate::Encoder; + let mut encoded = Vec::new(); + let mut encoder = Encoder::new(&mut encoded); + encoder.write_all(data).unwrap(); + encoded +} + +pub fn dct_decode(data: &[u8], _params: &DCTDecodeParams) -> Result> { + use jpeg_decoder::Decoder; + let mut decoder = Decoder::new(data); + let pixels = decoder.decode()?; + Ok(pixels) +} + +pub fn lzw_decode(data: &[u8], params: &LZWFlateParams) -> Result> { + use weezl::{BitOrder, decode::Decoder}; + let mut out = vec![]; + + let mut decoder = if params.early_change != 0 { + Decoder::with_tiff_size_switch(BitOrder::Msb, 9) + } else { + Decoder::new(BitOrder::Msb, 9) + }; + + decoder + .into_stream(&mut out) + .decode_all(data).status?; + Ok(out) +} +fn lzw_encode(data: &[u8], params: &LZWFlateParams) -> Result> { + use weezl::{BitOrder, encode::Encoder}; + if params.early_change != 0 { + bail!("encoding early_change != 0 is not supported"); + } + let mut compressed = vec![]; + Encoder::new(BitOrder::Msb, 9) + .into_stream(&mut compressed) + .encode_all(data).status?; + Ok(compressed) +} + +pub fn fax_decode(data: &[u8], params: &CCITTFaxDecodeParams) -> Result> { + use fax::{Color, decoder::{pels, decode_g4}}; + + if params.k < 0 { + let columns = params.columns as usize; + let rows = params.rows as usize; + + let height = if params.rows == 0 { None } else { Some(params.rows as u16)}; + let mut buf = Vec::with_capacity(columns * rows); + decode_g4(data.iter().cloned(), columns as u16, height, |line| { + buf.extend(pels(line, columns as u16).map(|c| match c { + Color::Black => 0, + Color::White => 255 + })); + assert_eq!(buf.len() % columns, 0, "len={}, columns={}", buf.len(), columns); + }).ok_or(PdfError::Other { msg: "faxdecode failed".into() })?; + assert_eq!(buf.len() % columns, 0, "len={}, columns={}", buf.len(), columns); + + if rows != 0 && buf.len() != columns * rows { + bail!("decoded length does not match (expected {rows}∙{columns}, got {})", buf.len()); + } + Ok(buf) + } else { + unimplemented!() + } +} + +pub fn run_length_decode(data: &[u8]) -> Result> { + // Used as specification + let mut buf = Vec::new(); + let d = data; + let mut c = 0; + + while c < data.len() { + let length = d[c]; // length is first byte + if length < 128 { + let start = c + 1; + let end = start + length as usize + 1; + // copy _following_ length + 1 bytes literally + buf.extend_from_slice(&d[start..end]); + c = end; // move cursor to next run + } else if length >= 129 { + let copy = 257 - length as usize; // copy 2 - 128 times + let b = d[c + 1]; // copied byte + buf.extend(std::iter::repeat(b).take(copy)); + c += 2; // move cursor to next run + } else { + break; // EOD + } + } + + Ok(buf) +} + +pub type DecodeFn = dyn Fn(&[u8]) -> Result> + Sync + Send + 'static; +static JPX_DECODER: OnceCell> = OnceCell::new(); +static JBIG2_DECODER: OnceCell> = OnceCell::new(); + +pub fn set_jpx_decoder(f: Box) { + let _ = JPX_DECODER.set(f); +} +pub fn set_jbig2_decoder(f: Box) { + let _ = JBIG2_DECODER.set(f); +} + +pub fn jpx_decode(data: &[u8]) -> Result> { + JPX_DECODER.get().ok_or_else(|| PdfError::Other { msg: "jp2k decoder not set".into()})?(data) +} +pub fn jbig2_decode(data: &[u8], globals: &[u8]) -> Result> { + let data = [ + // file header + // &[0x97, 0x4A, 0x42, 0x32, 0x0D, 0x0A, 0x1A, 0x0A, 0x01, 0x00, 
0x00, 0x00, 0x01], + + globals, + data, + + // end of page + &[0x00, 0x00, 0x00, 0x03, 0x31, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00], + + // end of stream + &[0x00, 0x00, 0x00, 0x04, 0x33, 0x01, 0x00, 0x00, 0x00, 0x00], + ].concat(); + JBIG2_DECODER.get().ok_or_else(|| PdfError::Other { msg: "jbig2 decoder not set".into()})?(&data) +} + +pub fn decode(data: &[u8], filter: &StreamFilter) -> Result> { + match *filter { + StreamFilter::ASCIIHexDecode => decode_hex(data), + StreamFilter::ASCII85Decode => decode_85(data), + StreamFilter::LZWDecode(ref params) => lzw_decode(data, params), + StreamFilter::FlateDecode(ref params) => flate_decode(data, params), + StreamFilter::RunLengthDecode => run_length_decode(data), + StreamFilter::DCTDecode(ref params) => dct_decode(data, params), + + _ => bail!("unimplemented {filter:?}"), + } +} + +pub fn encode(data: &[u8], filter: &StreamFilter) -> Result> { + match *filter { + StreamFilter::ASCIIHexDecode => Ok(encode_hex(data)), + StreamFilter::ASCII85Decode => Ok(encode_85(data)), + StreamFilter::LZWDecode(ref params) => lzw_encode(data, params), + StreamFilter::FlateDecode (ref _params) => Ok(flate_encode(data)), + _ => unimplemented!(), + } +} + +/* + * Predictor - copied and adapted from PNG crate.. + */ + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(u8)] +#[allow(dead_code)] +pub enum PredictorType { + NoFilter = 0, + Sub = 1, + Up = 2, + Avg = 3, + Paeth = 4 +} + +impl PredictorType { + /// u8 -> Self. Temporary solution until Rust provides a canonical one. + pub fn from_u8(n: u8) -> Result { + match n { + 0 => Ok(PredictorType::NoFilter), + 1 => Ok(PredictorType::Sub), + 2 => Ok(PredictorType::Up), + 3 => Ok(PredictorType::Avg), + 4 => Ok(PredictorType::Paeth), + n => Err(PdfError::IncorrectPredictorType {n}) + } + } +} + +fn filter_paeth(a: u8, b: u8, c: u8) -> u8 { + let ia = a as i16; + let ib = b as i16; + let ic = c as i16; + + let p = ia + ib - ic; + + let pa = (p - ia).abs(); + let pb = (p - ib).abs(); + let pc = (p - ic).abs(); + + if pa <= pb && pa <= pc { + a + } else if pb <= pc { + b + } else { + c + } +} + +pub fn unfilter(filter: PredictorType, bpp: usize, prev: &[u8], inp: &[u8], out: &mut [u8]) { + use self::PredictorType::*; + let len = inp.len(); + assert_eq!(len, out.len()); + assert_eq!(len, prev.len()); + if bpp > len { + return; + } + + match filter { + NoFilter => { + out[..len].copy_from_slice(&inp[..len]); + } + Sub => { + out[..bpp].copy_from_slice(&inp[..bpp]); + + for i in bpp..len { + out[i] = inp[i].wrapping_add(out[i - bpp]); + } + } + Up => { + for i in 0..len { + out[i] = inp[i].wrapping_add(prev[i]); + } + } + Avg => { + for i in 0..bpp { + out[i] = inp[i].wrapping_add(prev[i] / 2); + } + + for i in bpp..len { + out[i] = inp[i].wrapping_add( + ((out[i - bpp] as i16 + prev[i] as i16) / 2) as u8 + ); + } + } + Paeth => { + for i in 0..bpp { + out[i] = inp[i].wrapping_add( + filter_paeth(0, prev[i], 0) + ); + } + + for i in bpp..len { + out[i] = inp[i].wrapping_add( + filter_paeth(out[i - bpp], prev[i], prev[i - bpp]) + ); + } + } + } +} + +#[allow(unused)] +pub fn filter(method: PredictorType, bpp: usize, previous: &[u8], current: &mut [u8]) { + use self::PredictorType::*; + let len = current.len(); + + match method { + NoFilter => (), + Sub => { + for i in (bpp..len).rev() { + current[i] = current[i].wrapping_sub(current[i - bpp]); + } + } + Up => { + for i in 0..len { + current[i] = current[i].wrapping_sub(previous[i]); + } + } + Avg => { + for i in (bpp..len).rev() { + current[i] = 
current[i].wrapping_sub(current[i - bpp].wrapping_add(previous[i]) / 2); + } + + for i in 0..bpp { + current[i] = current[i].wrapping_sub(previous[i] / 2); + } + } + Paeth => { + for i in (bpp..len).rev() { + current[i] = current[i].wrapping_sub(filter_paeth(current[i - bpp], previous[i], previous[i - bpp])); + } + + for i in 0..bpp { + current[i] = current[i].wrapping_sub(filter_paeth(0, previous[i], 0)); + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn base_85() { + fn s(b: &[u8]) -> &str { std::str::from_utf8(b).unwrap() } + + let case = &b"hello world!"[..]; + let encoded = encode_85(case); + assert_eq!(s(&encoded), "BOu!rD]j7BEbo80~>"); + let decoded = decode_85(&encoded).unwrap(); + assert_eq!(case, &*decoded); + /* + assert_eq!( + s(&decode_85( + &lzw_decode( + &decode_85(&include_bytes!("data/t01_lzw+base85.txt")[..]).unwrap(), + &LZWFlateParams::default() + ).unwrap() + ).unwrap()), + include_str!("data/t01_plain.txt") + ); + */ + } + + #[test] + fn run_length_decode_test() { + let x = run_length_decode(&[254, b'a', 255, b'b', 2, b'c', b'b', b'c', 254, b'a', 128]).unwrap(); + assert_eq!(b"aaabbcbcaaa", x.as_slice()); + } +} diff --git a/src-pdfrs/pdf/src/encoding.rs b/src-pdfrs/pdf/src/encoding.rs new file mode 100644 index 0000000..1efdb17 --- /dev/null +++ b/src-pdfrs/pdf/src/encoding.rs @@ -0,0 +1,108 @@ +use std::collections::HashMap; +use istring::SmallString; +use crate as pdf; +use crate::object::{Object, Resolve, ObjectWrite, DeepClone}; +use crate::primitive::{Primitive, Dictionary}; +use crate::error::{Result}; +use datasize::DataSize; + +#[derive(Debug, Clone, DataSize)] +pub struct Encoding { + pub base: BaseEncoding, + pub differences: HashMap, +} + +#[derive(Object, ObjectWrite, Debug, Clone, Eq, PartialEq, DataSize)] +pub enum BaseEncoding { + StandardEncoding, + SymbolEncoding, + MacRomanEncoding, + WinAnsiEncoding, + MacExpertEncoding, + #[pdf(name = "Identity-H")] + IdentityH, + None, + + #[pdf(other)] + Other(String), +} +impl Object for Encoding { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + name @ Primitive::Name(_) => { + Ok(Encoding { + base: BaseEncoding::from_primitive(name, resolve)?, + differences: HashMap::new(), + }) + } + Primitive::Dictionary(mut dict) => { + let base = match dict.remove("BaseEncoding") { + Some(p) => BaseEncoding::from_primitive(p, resolve)?, + None => BaseEncoding::None + }; + let mut gid = 0; + let mut differences = HashMap::new(); + if let Some(p) = dict.remove("Differences") { + for part in p.resolve(resolve)?.into_array()? 
{ + match part { + Primitive::Integer(code) => { + gid = code as u32; + } + Primitive::Name(name) => { + differences.insert(gid, name); + gid += 1; + } + _ => bail!("Unknown part primitive in dictionary: {:?}", part), + } + } + } + Ok(Encoding { base, differences }) + } + Primitive::Reference(r) => Self::from_primitive(resolve.resolve(r)?, resolve), + Primitive::Stream(s) => Self::from_primitive(Primitive::Dictionary(s.info), resolve), + _ => bail!("Unknown element: {:?}", p), + } + } +} +impl ObjectWrite for Encoding { + fn to_primitive(&self, update: &mut impl pdf::object::Updater) -> Result { + let base = self.base.to_primitive(update)?; + if self.differences.len() == 0 { + Ok(base) + } else { + let mut list = vec![]; + + let mut diff_list: Vec<_> = self.differences.iter().collect(); + diff_list.sort(); + let mut last = None; + + for &(&gid, name) in diff_list.iter() { + if !last.map(|n| n + 1 == gid).unwrap_or(false) { + list.push(Primitive::Integer(gid as i32)); + } + + list.push(Primitive::Name(name.clone())); + + last = Some(gid); + } + + let mut dict = Dictionary::new(); + dict.insert("BaseEncoding", base); + dict.insert("Differences", Primitive::Array(list)); + Ok(Primitive::Dictionary(dict)) + } + } +} +impl Encoding { + pub fn standard() -> Encoding { + Encoding { + base: BaseEncoding::StandardEncoding, + differences: HashMap::new() + } + } +} +impl DeepClone for Encoding { + fn deep_clone(&self, cloner: &mut impl pdf::object::Cloner) -> Result { + Ok(self.clone()) + } +} \ No newline at end of file diff --git a/src-pdfrs/pdf/src/error.rs b/src-pdfrs/pdf/src/error.rs new file mode 100644 index 0000000..f96abda --- /dev/null +++ b/src-pdfrs/pdf/src/error.rs @@ -0,0 +1,347 @@ +use crate::object::ObjNr; +use std::io; +use std::error::Error; +use crate::parser::ParseFlags; +use std::sync::Arc; +use datasize::{DataSize, data_size}; +use snafu::ErrorCompat; + +#[derive(Debug, Snafu)] +pub enum PdfError { + // Syntax / parsing + #[snafu(display("Unexpected end of file"))] + EOF, + + #[snafu(display("Shared, caused by\n {}", source))] + Shared { + #[snafu(source)] + source: Arc + }, + + #[snafu(display("Not enough Operator arguments"))] + NoOpArg, + + #[snafu(display("Error parsing from string, caused by\n {}", source))] + Parse { + #[snafu(source)] + source: Box + }, + + #[snafu(display("Invalid encoding, caused by\n {}", source))] + Encoding { + #[snafu(source)] + source: Box + }, + + #[snafu(display("Out of bounds: index {}, but len is {}", index, len))] + Bounds { index: usize, len: usize }, + + #[snafu(display("Unexpected token '{}' at {} - expected '{}'", lexeme, pos, expected))] + UnexpectedLexeme {pos: usize, lexeme: String, expected: &'static str}, + + #[snafu(display("Expecting an object, encountered {} at pos {}. Rest:\n{}\n\n((end rest))", first_lexeme, pos, rest))] + UnknownType {pos: usize, first_lexeme: String, rest: String}, + + #[snafu(display("Unknown variant '{}' for enum {}", name, id))] + UnknownVariant { id: &'static str, name: String }, + + #[snafu(display("'{}' not found.", word))] + NotFound { word: String }, + + #[snafu(display("Cannot follow reference during parsing - no resolve fn given (most likely /Length of Stream)."))] + Reference, // TODO: which one? 
+ + #[snafu(display("Erroneous 'type' field in xref stream - expected 0, 1 or 2, found {}", found))] + XRefStreamType { found: u64 }, + + #[snafu(display("Parsing read past boundary of Contents."))] + ContentReadPastBoundary, + + #[snafu(display("Primitive not allowed"))] + PrimitiveNotAllowed { allowed: ParseFlags, found: ParseFlags }, + + ////////////////// + // Encode/decode + #[snafu(display("Hex decode error. Position {}, bytes {:?}", pos, bytes))] + HexDecode {pos: usize, bytes: [u8; 2]}, + + #[snafu(display("Ascii85 tail error"))] + Ascii85TailError, + + #[snafu(display("Failed to convert '{}' into PredictorType", n))] + IncorrectPredictorType {n: u8}, + + ////////////////// + // Dictionary + #[snafu(display("Can't parse field {} of struct {}, caused by\n {}", field, typ, source))] + FromPrimitive { + typ: &'static str, + field: &'static str, + #[snafu(source)] + source: Box + }, + + #[snafu(display("Field /{} is missing in dictionary for type {}.", field, typ))] + MissingEntry { + typ: &'static str, + field: String + }, + + #[snafu(display("Expected to find value {} for key {}. Found {} instead.", value, key, found))] + KeyValueMismatch { + key: String, + value: String, + found: String, + }, + + #[snafu(display("Expected dictionary /Type = {}. Found /Type = {}.", expected, found))] + WrongDictionaryType { + expected: String, + found: String + }, + + ////////////////// + // Misc + #[snafu(display("Tried to dereference free object nr {}.", obj_nr))] + FreeObject {obj_nr: u64}, + + #[snafu(display("Tried to dereference non-existing object nr {}.", obj_nr))] + NullRef {obj_nr: u64}, + + #[snafu(display("Expected primitive {}, found primitive {} instead.", expected, found))] + UnexpectedPrimitive {expected: &'static str, found: &'static str}, + /* + WrongObjectType {expected: &'static str, found: &'static str} { + description("Function called on object of wrong type.") + display("Expected {}, found {}.", expected, found) + } + */ + #[snafu(display("Object stream index out of bounds ({}/{}).", index, max))] + ObjStmOutOfBounds {index: usize, max: usize}, + + #[snafu(display("Page out of bounds ({}/{}).", page_nr, max))] + PageOutOfBounds {page_nr: u32, max: u32}, + + #[snafu(display("Page {} could not be found in the page tree.", page_nr))] + PageNotFound {page_nr: u32}, + + #[snafu(display("Entry {} in xref table unspecified", id))] + UnspecifiedXRefEntry {id: ObjNr}, + + #[snafu(display("Invalid password"))] + InvalidPassword, + + #[snafu(display("Decryption failure"))] + DecryptionFailure, + + #[snafu(display("JPEG Error, caused by\n {}", source))] + Jpeg { + #[snafu(source)] + source: jpeg_decoder::Error + }, + + #[snafu(display("IO Error, caused by\n {}", source))] + Io { + #[snafu(source)] + source: io::Error + }, + + #[snafu(display("{}", msg))] + Other { msg: String }, + + #[snafu(display("NoneError at {}:{}:{}:{}", file, line, column, context))] + NoneError { file: &'static str, line: u32, column: u32, context: Context }, + + #[snafu(display("Try at {}:{}:{}:{}, caused by\n {}", file, line, column, context, source))] + Try { + file: &'static str, + line: u32, + column: u32, + context: Context, + #[snafu(source)] + source: Box + }, + + #[snafu(display("PostScriptParseError"))] + PostScriptParse, + + #[snafu(display("PostScriptExecError"))] + PostScriptExec, + + #[snafu(display("UTF16 decode error"))] + Utf16Decode, + + #[snafu(display("UTF8 decode error"))] + Utf8Decode, + + #[snafu(display("CID decode error"))] + CidDecode, + + #[snafu(display("Max nesting depth reached"))] + 
MaxDepth, + + #[snafu(display("Invalid"))] + Invalid, +} +impl PdfError { + pub fn is_eof(&self) -> bool { + match self { + PdfError::EOF => true, + PdfError::Try { ref source, .. } => source.is_eof(), + _ => false + } + } +} +datasize::non_dynamic_const_heap_size!(PdfError, 0); + +#[cfg(feature="cache")] +impl globalcache::ValueSize for PdfError { + #[inline] + fn size(&self) -> usize { + data_size(self) + } +} + +#[derive(Debug)] +pub struct Context(pub Vec<(&'static str, String)>); +impl std::fmt::Display for Context { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + for (i, &(key, ref val)) in self.0.iter().enumerate() { + if i == 0 { + writeln!(f)?; + } + writeln!(f, " {} = {}", key, val)?; + } + Ok(()) + } +} + +pub type Result = std::result::Result; + +impl From for PdfError { + fn from(source: io::Error) -> PdfError { + PdfError::Io { source } + } +} +impl From for PdfError { + fn from(msg: String) -> PdfError { + PdfError::Other { msg } + } +} +impl From> for PdfError { + fn from(source: Arc) -> PdfError { + PdfError::Shared { source } + } +} + +#[macro_export] +macro_rules! try_opt { + ($e:expr $(,$c:expr)*) => ( + match $e { + Some(v) => v, + None => { + let context = $crate::error::Context(vec![ $( (stringify!($c), format!("{:?}", $c) ) ),* ]); + return Err($crate::PdfError::NoneError { + file: file!(), + line: line!(), + column: column!(), + context, + }); + } + } + ); +} + +#[macro_export] +macro_rules! t { + ($e:expr $(,$c:expr)*) => { + match $e { + Ok(v) => v, + Err(e) => { + let context = $crate::error::Context(vec![ $( (stringify!($c), format!("{:?}", $c) ) ),* ]); + return Err($crate::PdfError::Try { file: file!(), line: line!(), column: column!(), context, source: e.into() }) + } + } + }; +} + +#[macro_export] +macro_rules! ctx { + ($e:expr, $($c:expr),*) => { + match $e { + Ok(v) => Ok(v), + Err(e) => { + let context = $crate::error::Context(vec![ $( (stringify!($c), format!("{:?}", $c) ) ),* ]); + Err($crate::PdfError::TryContext { file: file!(), line: line!(), column: column!(), context, source: e.into() }) + } + } + }; +} + +macro_rules! err_from { + ($($st:ty),* => $variant:ident) => ( + $( + impl From<$st> for PdfError { + fn from(e: $st) -> PdfError { + PdfError::$variant { source: e.into() } + } + } + )* + ) +} +err_from!(std::str::Utf8Error, std::string::FromUtf8Error, std::string::FromUtf16Error, + istring::FromUtf8Error, istring::FromUtf8Error => Encoding); +err_from!(std::num::ParseIntError, std::string::ParseError => Parse); +err_from!(jpeg_decoder::Error => Jpeg); + +macro_rules! other { + ($($t:tt)*) => ($crate::PdfError::Other { msg: format!($($t)*) }) +} + +macro_rules! err { + ($e: expr) => ({ + return Err($e); + }) +} +macro_rules! bail { + ($($t:tt)*) => { + err!($crate::PdfError::Other { msg: format!($($t)*) }) + } +} +macro_rules! 
unimplemented { + () => (bail!("Unimplemented @ {}:{}", file!(), line!())) +} + +#[cfg(not(feature = "dump"))] +pub fn dump_data(_data: &[u8]) {} + +#[cfg(feature = "dump")] +pub fn dump_data(data: &[u8]) { + use std::io::Write; + if let Some(path) = ::std::env::var_os("PDF_OUT") { + let (mut file, path) = tempfile::Builder::new() + .prefix("") + .tempfile_in(path).unwrap() + .keep().unwrap(); + file.write_all(&data).unwrap(); + info!("data written to {:?}", path); + } else { + info!("set PDF_OUT to an existing directory to dump stream data"); + } +} + +#[cfg(test)] +mod tests { + use super::PdfError; + + fn assert_send() {} + + fn assert_sync() {} + + #[test] + fn error_is_send_and_sync() { + // note that these checks happens at compile time, not when the test is run + assert_send::(); + assert_sync::(); + } +} diff --git a/src-pdfrs/pdf/src/file.rs b/src-pdfrs/pdf/src/file.rs new file mode 100644 index 0000000..a5bb1dc --- /dev/null +++ b/src-pdfrs/pdf/src/file.rs @@ -0,0 +1,708 @@ +//! This is kind of the entry-point of the type-safe PDF functionality. +use std::marker::PhantomData; +use std::collections::HashMap; +use std::sync::{Arc, Mutex}; +use std::path::Path; +use std::io::Write; + +use crate as pdf; +use crate::error::*; +use crate::object::*; +use crate::primitive::{Primitive, Dictionary, PdfString}; +use crate::backend::Backend; +use crate::any::*; +use crate::parser::{Lexer, parse_with_lexer}; +use crate::parser::{parse_indirect_object, parse, ParseFlags}; +use crate::xref::{XRef, XRefTable, XRefInfo}; +use crate::crypt::Decoder; +use crate::crypt::CryptDict; +use crate::enc::{StreamFilter, decode}; +use std::ops::Range; +use datasize::DataSize; + +#[cfg(feature="cache")] +pub use globalcache::{ValueSize, sync::SyncCache}; + +#[must_use] +pub struct PromisedRef { + inner: PlainRef, + _marker: PhantomData +} +impl PromisedRef { + pub fn get_inner(&self) -> PlainRef { + self.inner + } + pub fn get_ref(&self) -> Ref { + Ref::new(self.inner) + } +} + +pub trait Cache { + fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T; + fn clear(&self); +} +pub struct NoCache; +impl Cache for NoCache { + fn get_or_compute(&self, _key: PlainRef, compute: impl FnOnce() -> T) -> T { + compute() + } + fn clear(&self) {} +} + +#[cfg(feature="cache")] +impl Cache for Arc> { + fn get_or_compute(&self, key: PlainRef, compute: impl FnOnce() -> T) -> T { + self.get(key, compute) + } + fn clear(&self) { + (**self).clear() + } +} + +pub trait Log { + fn load_object(&self, _r: PlainRef) {} + fn log_get(&self, _r: PlainRef) {} +} +pub struct NoLog; +impl Log for NoLog {} + +pub struct Storage { + // objects identical to those in the backend + cache: OC, + stream_cache: SC, + + // objects that differ from the backend + changes: HashMap, + + refs: XRefTable, + + decoder: Option, + options: ParseOptions, + + backend: B, + + // Position of the PDF header in the file. 
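+    // Byte offsets recorded in the xref table are interpreted relative to this
+    // position; object reads add it back (see `resolve_ref`, which reads from
+    // `self.start_offset + pos`).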
+ start_offset: usize, + + log: L +} + +impl Storage, OC, SC, L> +where + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + pub fn empty(object_cache: OC, stream_cache: SC, log: L) -> Self { + Storage { + cache: object_cache, + stream_cache, + changes: HashMap::new(), + refs: XRefTable::new(0), + decoder: None, + options: ParseOptions::strict(), + backend: Vec::from(&b"%PDF-1.7\n"[..]), + start_offset: 0, + log + } + } +} + +impl Storage +where + B: Backend, + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + pub fn into_inner(self) -> B { + self.backend + } + pub fn resolver(&self) -> impl Resolve + '_ { + StorageResolver::new(self) + } + pub fn with_cache(backend: B, options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result { + Ok(Storage { + start_offset: backend.locate_start_offset()?, + backend, + refs: XRefTable::new(0), + cache: object_cache, + stream_cache, + changes: HashMap::new(), + decoder: None, + options, + log + }) + } + fn decode(&self, id: PlainRef, range: Range, filters: &[StreamFilter]) -> Result> { + let data = self.backend.read(range)?; + + let mut data = Vec::from(data); + if let Some(ref decoder) = self.decoder { + data = Vec::from(t!(decoder.decrypt(id, &mut data))); + } + for filter in filters { + data = t!(decode(&data, filter), filter); + } + Ok(data.into()) + } + + pub fn load_storage_and_trailer(&mut self) -> Result { + self.load_storage_and_trailer_password(b"") + } + + pub fn load_storage_and_trailer_password(&mut self, password: &[u8]) -> Result { + + let resolver = StorageResolver::new(self); + let (refs, trailer) = t!(self.backend.read_xref_table_and_trailer(self.start_offset, &resolver)); + self.refs = refs; + + if let Some(crypt) = trailer.get("Encrypt") { + let key = trailer + .get("ID") + .ok_or(PdfError::MissingEntry { + typ: "Trailer", + field: "ID".into(), + })? + .as_array()? + .get(0) + .ok_or(PdfError::MissingEntry { + typ: "Trailer", + field: "ID[0]".into() + })? + .as_string()? + .as_bytes(); + + let resolver = StorageResolver::new(self); + let dict = CryptDict::from_primitive(crypt.clone(), &resolver)?; + + self.decoder = Some(t!(Decoder::from_password(&dict, key, password))); + if let Primitive::Reference(reference) = crypt { + self.decoder.as_mut().unwrap().encrypt_indirect_object = Some(*reference); + } + if let Some(Primitive::Reference(catalog_ref)) = trailer.get("Root") { + let resolver = StorageResolver::new(self); + let catalog = t!(t!(resolver.resolve(*catalog_ref)).resolve(&resolver)?.into_dictionary()); + if let Some(Primitive::Reference(metadata_ref)) = catalog.get("Metadata") { + self.decoder.as_mut().unwrap().metadata_indirect_object = Some(*metadata_ref); + } + } + } + Ok(trailer) + } + pub fn scan(&self) -> impl Iterator> + '_ { + let xref_offset = self.backend.locate_xref_offset().unwrap(); + let slice = self.backend.read(self.start_offset .. xref_offset).unwrap(); + let mut lexer = Lexer::with_offset(slice, 0); + + fn skip_xref(lexer: &mut Lexer) -> Result<()> { + while lexer.next()? 
!= "trailer" { + + } + Ok(()) + } + + let resolver = StorageResolver::new(self); + std::iter::from_fn(move || { + loop { + let pos = lexer.get_pos(); + match parse_indirect_object(&mut lexer, &resolver, self.decoder.as_ref(), ParseFlags::all()) { + Ok((r, p)) => return Some(Ok(ScanItem::Object(r, p))), + Err(e) if e.is_eof() => return None, + Err(e) => { + lexer.set_pos(pos); + if let Ok(s) = lexer.next() { + debug!("next: {:?}", String::from_utf8_lossy(s.as_slice())); + match &*s { + b"xref" => { + if let Err(e) = skip_xref(&mut lexer) { + return Some(Err(e)); + } + if let Ok(trailer) = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::DICT).and_then(|p| p.into_dictionary()) { + return Some(Ok(ScanItem::Trailer(trailer))); + } + } + b"startxref" if lexer.next().is_ok() => { + continue; + } + _ => {} + } + } + return Some(Err(e)); + } + } + } + }) + } + fn resolve_ref(&self, r: PlainRef, flags: ParseFlags, resolve: &impl Resolve) -> Result { + match self.changes.get(&r.id) { + Some((p, _)) => Ok((*p).clone()), + None => match t!(self.refs.get(r.id)) { + XRef::Raw {pos, ..} => { + let mut lexer = Lexer::with_offset(t!(self.backend.read(self.start_offset + pos ..)), self.start_offset + pos); + let p = t!(parse_indirect_object(&mut lexer, resolve, self.decoder.as_ref(), flags)).1; + Ok(p) + } + XRef::Stream {stream_id, index} => { + if !flags.contains(ParseFlags::STREAM) { + return Err(PdfError::PrimitiveNotAllowed { found: ParseFlags::STREAM, allowed: flags }); + } + // use get to cache the object stream + let obj_stream = resolve.get::(Ref::from_id(stream_id))?; + + let (data, range) = t!(obj_stream.get_object_slice(index, resolve)); + let slice = data.get(range.clone()).ok_or_else(|| other!("invalid range {:?}, but only have {} bytes", range, data.len()))?; + parse(slice, resolve, flags) + } + XRef::Free {..} => err!(PdfError::FreeObject {obj_nr: r.id}), + XRef::Promised => unimplemented!(), + XRef::Invalid => err!(PdfError::NullRef {obj_nr: r.id}), + } + } + } +} + +pub enum ScanItem { + Object(PlainRef, Primitive), + Trailer(Dictionary) +} + +struct StorageResolver<'a, B, OC, SC, L> { + storage: &'a Storage, + chain: Mutex>, +} +impl<'a, B, OC, SC, L> StorageResolver<'a, B, OC, SC, L> { + pub fn new(storage: &'a Storage) -> Self { + StorageResolver { + storage, + chain: Mutex::new(vec![]) + } + } +} + +struct Defer(F); +impl Drop for Defer { + fn drop(&mut self) { + (self.0)(); + } +} + +impl<'a, B, OC, SC, L> Resolve for StorageResolver<'a, B, OC, SC, L> +where + B: Backend, + OC: Cache>>, + SC: Cache, Arc>>, + L: Log +{ + fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, _depth: usize) -> Result { + let storage = self.storage; + storage.log.load_object(r); + + storage.resolve_ref(r, flags, self) + } + + fn get(&self, r: Ref) -> Result> { + let key = r.get_inner(); + self.storage.log.log_get(key); + + { + debug!("get {key:?} as {}", std::any::type_name::()); + let mut chain = self.chain.lock().unwrap(); + if chain.contains(&key) { + bail!("Recursive reference"); + } + chain.push(key); + } + let _defer = Defer(|| { + let mut chain = self.chain.lock().unwrap(); + assert_eq!(chain.pop(), Some(key)); + }); + + let res = self.storage.cache.get_or_compute(key, || { + match self.resolve(key).and_then(|p| T::from_primitive(p, self)) { + Ok(obj) => Ok(AnySync::new(Shared::new(obj))), + Err(e) => { + let p = self.resolve(key); + warn!("failed to decode {p:?} as {}", std::any::type_name::()); + Err(Arc::new(e)) + } + } + }); + match res { + Ok(any) => { + match any.downcast() { + Ok(val) 
=> Ok(RcRef::new(key, val)), + Err(_) => { + let p = self.resolve(key)?; + Ok(RcRef::new(key, T::from_primitive(p, self)?.into())) + } + } + } + Err(e) => Err(PdfError::Shared { source: e.clone()}), + } + } + fn options(&self) -> &ParseOptions { + &self.storage.options + } + fn stream_data(&self, id: PlainRef, range: Range) -> Result> { + self.storage.decode(id, range, &[]) + } + + fn get_data_or_decode(&self, id: PlainRef, range: Range, filters: &[StreamFilter]) -> Result> { + self.storage.stream_cache.get_or_compute(id, || self.storage.decode(id, range, filters).map_err(Arc::new)) + .map_err(|e| e.into()) + } +} + +impl Updater for Storage +where + B: Backend, + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + fn create(&mut self, obj: T) -> Result> { + let id = self.refs.len() as u64; + self.refs.push(XRef::Promised); + let primitive = obj.to_primitive(self)?; + self.changes.insert(id, (primitive, 0)); + let rc = Shared::new(obj); + let r = PlainRef { id, gen: 0 }; + + Ok(RcRef::new(r, rc)) + } + fn update(&mut self, old: PlainRef, obj: T) -> Result> { + use std::collections::hash_map::Entry; + + let r = match self.refs.get(old.id)? { + XRef::Free { .. } => panic!(), + XRef::Raw { gen_nr, .. } => PlainRef { id: old.id, gen: gen_nr }, + XRef::Stream { .. } => return self.create(obj), + XRef::Promised => PlainRef { id: old.id, gen: 0 }, + XRef::Invalid => panic!() + }; + let primitive = obj.to_primitive(self)?; + match self.changes.entry(old.id) { + Entry::Vacant(e) => { + e.insert((primitive, r.gen)); + } + Entry::Occupied(mut e) => match (e.get_mut(), primitive) { + ((Primitive::Dictionary(ref mut dict), _), Primitive::Dictionary(new)) => { + dict.append(new); + } + (old, new) => { + *old = (new, r.gen); + } + } + } + let rc = Shared::new(obj); + + Ok(RcRef::new(r, rc)) + } + + fn promise(&mut self) -> PromisedRef { + let id = self.refs.len() as u64; + + self.refs.push(XRef::Promised); + + PromisedRef { + inner: PlainRef { + id, + gen: 0 + }, + _marker: PhantomData + } + } + + fn fulfill(&mut self, promise: PromisedRef, obj: T) -> Result> { + self.update(promise.inner, obj) + } +} + +impl Storage, OC, SC, L> +where + OC: Cache>>, + SC: Cache, Arc>>, + L: Log +{ + pub fn save(&mut self, trailer: &mut Trailer) -> Result<&[u8]> { + // writing the trailer generates another id for the info dictionary + trailer.size = (self.refs.len() + 2) as _; + let trailer_dict = trailer.to_dict(self)?; + + let xref_promise = self.promise::>(); + + let mut changes: Vec<_> = self.changes.iter().collect(); + changes.sort_unstable_by_key(|&(id, _)| id); + + for &(&id, &(ref primitive, gen)) in changes.iter() { + let pos = self.backend.len(); + self.refs.set(id, XRef::Raw { pos: pos as _, gen_nr: gen }); + writeln!(self.backend, "{} {} obj", id, gen)?; + primitive.serialize(&mut self.backend)?; + writeln!(self.backend, "endobj")?; + } + + let xref_pos = self.backend.len(); + self.refs.set(xref_promise.get_inner().id, XRef::Raw { pos: xref_pos, gen_nr: 0 }); + // only write up to the xref stream obj id + let stream = self.refs.write_stream(xref_promise.get_inner().id as usize + 1)?; + + writeln!(self.backend, "{} {} obj", xref_promise.get_inner().id, 0)?; + let mut xref_and_trailer = stream.to_pdf_stream(&mut NoUpdate)?; + for (k, v) in trailer_dict.iter() { + xref_and_trailer.info.insert(k.clone(), v.clone()); + } + + xref_and_trailer.serialize(&mut self.backend)?; + writeln!(self.backend, "endobj")?; + + let _ = self.fulfill(xref_promise, stream)?; + + write!(self.backend, "\nstartxref\n{}\n%%EOF", 
xref_pos).unwrap(); + + // update trailer which may have change now. + self.cache.clear(); + *trailer = Trailer::from_dict(trailer_dict, &self.resolver())?; + + Ok(&self.backend) + } +} + +#[cfg(feature="cache")] +pub type ObjectCache = Arc>>>; +#[cfg(feature="cache")] +pub type StreamCache = Arc, Arc>>>; +#[cfg(feature="cache")] +pub type CachedFile = File; + +pub struct File { + storage: Storage, + pub trailer: Trailer, +} +impl Updater for File +where + B: Backend, + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + fn create(&mut self, obj: T) -> Result> { + self.storage.create(obj) + } + fn update(&mut self, old: PlainRef, obj: T) -> Result> { + self.storage.update(old, obj) + } + fn promise(&mut self) -> PromisedRef { + self.storage.promise() + } + fn fulfill(&mut self, promise: PromisedRef, obj: T) -> Result> { + self.storage.fulfill(promise, obj) + } +} + +impl File, OC, SC, L> +where + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + pub fn save_to(&mut self, path: impl AsRef) -> Result<()> { + std::fs::write(path, self.storage.save(&mut self.trailer)?)?; + Ok(()) + } +} + + +pub struct FileOptions<'a, OC, SC, L> { + oc: OC, + sc: SC, + log: L, + password: &'a [u8], + parse_options: ParseOptions, +} +impl FileOptions<'static, NoCache, NoCache, NoLog> { + pub fn uncached() -> Self { + FileOptions { + oc: NoCache, + sc: NoCache, + password: b"", + parse_options: ParseOptions::strict(), + log: NoLog, + } + } +} + +#[cfg(feature="cache")] +impl FileOptions<'static, ObjectCache, StreamCache, NoLog> { + pub fn cached() -> Self { + FileOptions { + oc: SyncCache::new(), + sc: SyncCache::new(), + password: b"", + parse_options: ParseOptions::strict(), + log: NoLog + } + } +} +impl<'a, OC, SC, L> FileOptions<'a, OC, SC, L> +where + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + pub fn password(self, password: &'a [u8]) -> FileOptions<'a, OC, SC, L> { + FileOptions { + password, + .. self + } + } + pub fn cache(self, oc: O, sc: S) -> FileOptions<'a, O, S, L> { + let FileOptions { oc: _, sc: _, password, parse_options, log } = self; + FileOptions { + oc, + sc, + password, + parse_options, + log, + } + } + pub fn log(self, log: Log) -> FileOptions<'a, OC, SC, Log> { + let FileOptions { oc, sc, password, parse_options, .. } = self; + FileOptions { + oc, + sc, + password, + parse_options, + log, + } + } + pub fn parse_options(self, parse_options: ParseOptions) -> Self { + FileOptions { parse_options, .. self } + } + + /// open a file + pub fn open(self, path: impl AsRef) -> Result, OC, SC, L>> { + let data = std::fs::read(path)?; + self.load(data) + } + pub fn storage(self) -> Storage, OC, SC, L> { + let FileOptions { oc, sc, log, .. 
} = self; + Storage::empty(oc, sc, log) + } + + /// load data from the given backend + pub fn load(self, backend: B) -> Result> { + let FileOptions { oc, sc, password, parse_options, log } = self; + File::load_data(backend, password, parse_options, oc, sc, log) + } +} + + +impl File +where + B: Backend, + OC: Cache>>, + SC: Cache, Arc>>, + L: Log, +{ + fn load_data(backend: B, password: &[u8], options: ParseOptions, object_cache: OC, stream_cache: SC, log: L) -> Result { + let mut storage = Storage::with_cache(backend, options, object_cache, stream_cache, log)?; + let trailer = storage.load_storage_and_trailer_password(password)?; + + let resolver = StorageResolver::new(&storage); + let trailer = t!(Trailer::from_primitive( + Primitive::Dictionary(trailer), + &resolver, + )); + Ok(File { storage, trailer }) + } + pub fn new(storage: Storage, trailer: Trailer) -> Self { + File { storage, trailer } + } + pub fn resolver(&self) -> impl Resolve + '_ { + StorageResolver::new(&self.storage) + } + + pub fn get_root(&self) -> &Catalog { + &self.trailer.root + } + + pub fn pages(&self) -> impl Iterator> + '_ { + (0 .. self.num_pages()).map(move |n| self.get_page(n)) + } + pub fn num_pages(&self) -> u32 { + self.trailer.root.pages.count + } + + pub fn get_page(&self, n: u32) -> Result { + let resolver = StorageResolver::new(&self.storage); + self.trailer.root.pages.page(&resolver, n) + } + + pub fn get_xref(&self) -> &XRefTable { + &self.storage.refs + } + + pub fn update_catalog(&mut self, catalog: Catalog) -> Result<()> { + self.trailer.root = self.create(catalog)?; + Ok(()) + } + + pub fn set_options(&mut self, options: ParseOptions) { + self.storage.options = options; + } + + pub fn scan(&self) -> impl Iterator> + '_ { + self.storage.scan() + } + + pub fn log(&self) -> &L { + &self.storage.log + } +} + +#[derive(Object, ObjectWrite, DataSize)] +pub struct Trailer { + #[pdf(key = "Size")] + pub size: i32, + + #[pdf(key = "Prev")] + pub prev_trailer_pos: Option, + + #[pdf(key = "Root")] + pub root: RcRef, + + #[pdf(key = "Encrypt")] + pub encrypt_dict: Option>, + + #[pdf(key = "Info", indirect)] + pub info_dict: Option>, + + #[pdf(key = "ID")] + pub id: Vec, + + #[pdf(other)] + pub other: Dictionary, +} + +#[test] +fn test_read_x_ref() { + let file = FileOptions::cached().open("/").unwrap(); + let resolver = file.resolver(); + let xref_table = file.get_xref(); + let entries: Vec = xref_table.iter().enumerate() + .map(|(i, x)| PlainRef {id: i as u64, gen: x as u64}) + .map(|plain_ref| resolver.resolve(plain_ref)) + .filter(|r| r.is_ok()) + .map(|r| r.unwrap()) + .collect(); + for (i, entry) in entries.iter().enumerate() { + match entry { + Primitive::Stream(stream) => println!("{}: Stream {}", i, stream.info), + Primitive::Dictionary(_) => println!("{}: Dictionary {}", i, entry), + Primitive::Array(_) => println!("{}: Array: {}", i, entry), + _ => println!("{}: {}", i, entry) + } + } +} diff --git a/src-pdfrs/pdf/src/font.rs b/src-pdfrs/pdf/src/font.rs new file mode 100644 index 0000000..4a45133 --- /dev/null +++ b/src-pdfrs/pdf/src/font.rs @@ -0,0 +1,739 @@ +use crate as pdf; +use crate::object::*; +use crate::primitive::*; +use crate::error::*; +use crate::encoding::Encoding; +use std::collections::HashMap; +use std::fmt::Write; +use crate::parser::{Lexer, parse_with_lexer, ParseFlags}; +use std::convert::TryInto; +use std::sync::Arc; +use istring::SmallString; +use datasize::DataSize; +use itertools::Itertools; + +#[allow(non_upper_case_globals, dead_code)] +mod flags { + pub const FixedPitch: 
u32 = 1 << 0; + pub const Serif: u32 = 1 << 1; + pub const Symbolic: u32 = 1 << 2; + pub const Script: u32 = 1 << 3; + pub const Nonsymbolic: u32 = 1 << 5; + pub const Italic: u32 = 1 << 6; + pub const AllCap: u32 = 1 << 16; + pub const SmallCap: u32 = 1 << 17; + pub const ForceBold: u32 = 1 << 18; +} + +#[derive(Object, ObjectWrite, Debug, Copy, Clone, DataSize, DeepClone)] +pub enum FontType { + Type0, + Type1, + MMType1, + Type3, + TrueType, + CIDFontType0, //Type1 + CIDFontType2, // TrueType +} + +#[derive(Debug, DataSize, DeepClone)] +pub struct Font { + pub subtype: FontType, + pub name: Option, + pub data: FontData, + + pub encoding: Option, + + // FIXME: Should use RcRef + pub to_unicode: Option>>, + + /// other keys not mapped in other places. May change over time without notice, and adding things probably will break things. So don't expect this to be part of the stable API + pub _other: Dictionary +} + +#[derive(Debug, DataSize, DeepClone)] +pub enum FontData { + Type1(TFont), + Type0(Type0Font), + TrueType(TFont), + CIDFontType0(CIDFont), + CIDFontType2(CIDFont), + Other(Dictionary), +} + +#[derive(Debug, DataSize, DeepClone)] +pub enum CidToGidMap { + Identity, + Table(Vec) +} +impl Object for CidToGidMap { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Name(name) if name == "Identity" => { + Ok(CidToGidMap::Identity) + } + p @ Primitive::Stream(_) | p @ Primitive::Reference(_) => { + let stream: Stream<()> = Stream::from_primitive(p, resolve)?; + let data = stream.data(resolve)?; + Ok(CidToGidMap::Table(data.chunks_exact(2).map(|c| (c[0] as u16) << 8 | c[1] as u16).collect())) + }, + p => Err(PdfError::UnexpectedPrimitive { + expected: "/Identity or Stream", + found: p.get_debug_name() + }) + } + } +} +impl ObjectWrite for CidToGidMap { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + CidToGidMap::Identity => Ok(Name::from("Identity").into()), + CidToGidMap::Table(ref table) => { + let mut data = Vec::with_capacity(table.len() * 2); + data.extend(table.iter().flat_map(|&v| <[u8; 2]>::into_iter(v.to_be_bytes()))); + Stream::new((), data).to_primitive(update) + } + } + } +} + +impl Object for Font { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut dict = p.resolve(resolve)?.into_dictionary()?; + + let subtype = t!(FontType::from_primitive(dict.require("Font", "Subtype")?, resolve)); + + // BaseFont is required for all FontTypes except Type3 + dict.expect("Font", "Type", "Font", true)?; + let base_font_primitive = dict.get("BaseFont"); + let base_font = match (base_font_primitive, subtype) { + (Some(name), _) => Some(t!(t!(name.clone().resolve(resolve)).into_name(), name)), + (None, FontType::Type3) => None, + (_, _) => return Err(PdfError::MissingEntry { + typ: "Font", + field: "BaseFont".to_string() + }) + }; + + let encoding = dict.remove("Encoding").map(|p| Object::from_primitive(p, resolve)).transpose()?; + + let to_unicode = match dict.remove("ToUnicode") { + Some(p) => Some(Object::from_primitive(p, resolve)?), + None => None + }; + let _other = dict.clone(); + let data = match subtype { + FontType::Type0 => FontData::Type0(Type0Font::from_dict(dict, resolve)?), + FontType::Type1 => FontData::Type1(TFont::from_dict(dict, resolve)?), + FontType::TrueType => FontData::TrueType(TFont::from_dict(dict, resolve)?), + FontType::CIDFontType0 => FontData::CIDFontType0(CIDFont::from_dict(dict, resolve)?), + FontType::CIDFontType2 => 
FontData::CIDFontType2(CIDFont::from_dict(dict, resolve)?), + _ => FontData::Other(dict) + }; + + Ok(Font { + subtype, + name: base_font, + data, + encoding, + to_unicode, + _other + }) + } +} +impl ObjectWrite for Font { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let mut dict = match self.data { + FontData::CIDFontType0(ref d) | FontData::CIDFontType2(ref d) => d.to_dict(update)?, + FontData::TrueType(ref d) | FontData::Type1(ref d) => d.to_dict(update)?, + FontData::Type0(ref d) => d.to_dict(update)?, + FontData::Other(ref dict) => dict.clone(), + }; + + if let Some(ref to_unicode) = self.to_unicode { + dict.insert("ToUnicode", to_unicode.to_primitive(update)?); + } + if let Some(ref encoding) = self.encoding { + dict.insert("Encoding", encoding.to_primitive(update)?); + } + if let Some(ref name) = self.name { + dict.insert("BaseFont", name.to_primitive(update)?); + } + + let subtype = match self.data { + FontData::Type0(_) => FontType::Type0, + FontData::Type1(_) => FontType::Type1, + FontData::TrueType(_) => FontType::TrueType, + FontData::CIDFontType0(_) => FontType::CIDFontType0, + FontData::CIDFontType2(_) => FontType::CIDFontType2, + FontData::Other(_) => bail!("unimplemented") + }; + dict.insert("Subtype", subtype.to_primitive(update)?); + dict.insert("Type", Name::from("Font")); + + Ok(Primitive::Dictionary(dict)) + } +} + + +#[derive(Debug)] +pub struct Widths { + values: Vec, + default: f32, + first_char: usize +} +impl Widths { + pub fn get(&self, cid: usize) -> f32 { + if cid < self.first_char { + self.default + } else { + self.values.get(cid - self.first_char).cloned().unwrap_or(self.default) + } + } + fn new(default: f32) -> Widths { + Widths { + default, + values: Vec::new(), + first_char: 0 + } + } + fn ensure_cid(&mut self, cid: usize) { + if let Some(offset) = cid.checked_sub(self.first_char) { // cid may be < first_char + // reserve difference of offset to capacity + // if enough capacity to cover offset, saturates to zero, and reserve will do nothing + self.values.reserve(offset.saturating_sub(self.values.capacity())); + } + } + #[allow(clippy::float_cmp)] // TODO + fn set(&mut self, cid: usize, width: f32) { + self._set(cid, width); + debug_assert_eq!(self.get(cid), width); + } + fn _set(&mut self, cid: usize, width: f32) { + use std::iter::repeat; + + if self.values.is_empty() { + self.first_char = cid; + self.values.push(width); + return; + } + + if cid == self.first_char + self.values.len() { + self.values.push(width); + return; + } + + if cid < self.first_char { + self.values.splice(0 .. 
0, repeat(self.default).take(self.first_char - cid)); + self.first_char = cid; + self.values[0] = width; + return; + } + + if cid > self.values.len() + self.first_char { + self.ensure_cid(cid); + self.values.extend(repeat(self.default).take(cid - self.first_char - self.values.len())); + self.values.push(width); + return; + } + + self.values[cid - self.first_char] = width; + } +} +impl Font { + pub fn embedded_data(&self, resolve: &impl Resolve) -> Option>> { + match self.data { + FontData::Type0(ref t) => t.descendant_fonts.get(0).and_then(|f| f.embedded_data(resolve)), + FontData::CIDFontType0(ref c) | FontData::CIDFontType2(ref c) => c.font_descriptor.data(resolve), + FontData::Type1(ref t) | FontData::TrueType(ref t) => t.font_descriptor.as_ref().and_then(|d| d.data(resolve)), + _ => None + } + } + pub fn is_cid(&self) -> bool { + matches!(self.data, FontData::Type0(_) | FontData::CIDFontType0(_) | FontData::CIDFontType2(_)) + } + pub fn cid_to_gid_map(&self) -> Option<&CidToGidMap> { + match self.data { + FontData::Type0(ref inner) => inner.descendant_fonts.get(0).and_then(|f| f.cid_to_gid_map()), + FontData::CIDFontType0(ref f) | FontData::CIDFontType2(ref f) => f.cid_to_gid_map.as_ref(), + _ => None + } + } + pub fn encoding(&self) -> Option<&Encoding> { + self.encoding.as_ref() + } + pub fn info(&self) -> Option<&TFont> { + match self.data { + FontData::Type1(ref info) => Some(info), + FontData::TrueType(ref info) => Some(info), + _ => None + } + } + pub fn widths(&self, resolve: &impl Resolve) -> Result> { + match self.data { + FontData::Type0(ref t0) => t0.descendant_fonts[0].widths(resolve), + FontData::Type1(ref info) | FontData::TrueType(ref info) => { + match *info { + TFont { first_char: Some(first), ref widths, .. } => Ok(Some(Widths { + default: 0.0, + first_char: first as usize, + values: widths.as_ref().cloned().unwrap_or_default() + })), + _ => Ok(None) + } + }, + FontData::CIDFontType0(ref cid) | FontData::CIDFontType2(ref cid) => { + let mut widths = Widths::new(cid.default_width); + let mut iter = cid.widths.iter(); + while let Some(p) = iter.next() { + let c1 = p.as_usize()?; + match iter.next() { + Some(Primitive::Array(array)) => { + widths.ensure_cid(c1 + array.len() - 1); + for (i, w) in array.iter().enumerate() { + widths.set(c1 + i, w.as_number()?); + } + }, + Some(&Primitive::Reference(r)) => { + match resolve.resolve(r)? { + Primitive::Array(array) => { + widths.ensure_cid(c1 + array.len() - 1); + for (i, w) in array.iter().enumerate() { + widths.set(c1 + i, w.as_number()?); + } + } + p => return Err(PdfError::Other { msg: format!("unexpected primitive in W array: {:?}", p) }) + } + } + Some(&Primitive::Integer(c2)) => { + let w = try_opt!(iter.next()).as_number()?; + for c in c1 ..= (c2 as usize) { + widths.set(c, w); + } + }, + p => return Err(PdfError::Other { msg: format!("unexpected primitive in W array: {:?}", p) }) + } + } + Ok(Some(widths)) + }, + _ => Ok(None) + } + } + pub fn to_unicode(&self, resolve: &impl Resolve) -> Option> { + self.to_unicode.as_ref().map(|s| (**s).data(resolve).and_then(|d| parse_cmap(&d))) + } +} +#[derive(Object, ObjectWrite, Debug, DataSize, DeepClone)] +pub struct TFont { + #[pdf(key="BaseFont")] + pub base_font: Option, + + /// per spec required, but some files lack it. 
+ #[pdf(key="FirstChar")] + pub first_char: Option, + + /// same + #[pdf(key="LastChar")] + pub last_char: Option, + + #[pdf(key="Widths")] + pub widths: Option>, + + #[pdf(key="FontDescriptor")] + pub font_descriptor: Option +} + +#[derive(Object, ObjectWrite, Debug, DataSize, DeepClone)] +pub struct Type0Font { + #[pdf(key="DescendantFonts")] + pub descendant_fonts: Vec>, + + #[pdf(key="ToUnicode")] + pub to_unicode: Option>>, +} + +#[derive(Object, ObjectWrite, Debug, DataSize, DeepClone)] +pub struct CIDFont { + #[pdf(key="CIDSystemInfo")] + pub system_info: Dictionary, + + #[pdf(key="FontDescriptor")] + pub font_descriptor: FontDescriptor, + + #[pdf(key="DW", default="1000.")] + pub default_width: f32, + + #[pdf(key="W")] + pub widths: Vec, + + #[pdf(key="CIDToGIDMap")] + pub cid_to_gid_map: Option, + + #[pdf(other)] + pub _other: Dictionary +} + + +#[derive(Object, ObjectWrite, Debug, DataSize, DeepClone)] +pub struct FontDescriptor { + #[pdf(key="FontName")] + pub font_name: Name, + + #[pdf(key="FontFamily")] + pub font_family: Option, + + #[pdf(key="FontStretch")] + pub font_stretch: Option, + + #[pdf(key="FontWeight")] + pub font_weight: Option, + + #[pdf(key="Flags")] + pub flags: u32, + + #[pdf(key="FontBBox")] + pub font_bbox: Rectangle, + + #[pdf(key="ItalicAngle")] + pub italic_angle: f32, + + // required as per spec, but still missing in some cases + #[pdf(key="Ascent")] + pub ascent: Option, + + #[pdf(key="Descent")] + pub descent: Option, + + #[pdf(key="Leading", default="0.")] + pub leading: f32, + + #[pdf(key="CapHeight")] + pub cap_height: Option, + + #[pdf(key="XHeight", default="0.")] + pub xheight: f32, + + #[pdf(key="StemV", default="0.")] + pub stem_v: f32, + + #[pdf(key="StemH", default="0.")] + pub stem_h: f32, + + #[pdf(key="AvgWidth", default="0.")] + pub avg_width: f32, + + #[pdf(key="MaxWidth", default="0.")] + pub max_width: f32, + + #[pdf(key="MissingWidth", default="0.")] + pub missing_width: f32, + + #[pdf(key="FontFile")] + pub font_file: Option>>, + + #[pdf(key="FontFile2")] + pub font_file2: Option>>, + + #[pdf(key="FontFile3")] + pub font_file3: Option>>, + + #[pdf(key="CharSet")] + pub char_set: Option +} +impl FontDescriptor { + pub fn data(&self, resolve: &impl Resolve) -> Option>> { + if let Some(ref s) = self.font_file { + Some((**s).data(resolve)) + } else if let Some(ref s) = self.font_file2 { + Some((**s).data(resolve)) + } else if let Some(ref s) = self.font_file3 { + Some((**s).data(resolve)) + } else { + None + } + } +} + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +#[pdf(key="Subtype")] +pub enum FontTypeExt { + Type1C, + CIDFontType0C, + OpenType +} +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct FontStream3 { + #[pdf(key="Subtype")] + pub subtype: FontTypeExt +} + +#[derive(Object, ObjectWrite, Debug, PartialEq, Eq, PartialOrd, Ord, Clone, DataSize, DeepClone)] +pub enum FontStretch { + UltraCondensed, + ExtraCondensed, + Condensed, + SemiCondensed, + Normal, + SemiExpanded, + Expanded, + ExtraExpanded, + UltraExpanded +} + +#[derive(Clone, Debug, Default)] +pub struct ToUnicodeMap { + // todo: reduce allocations + inner: HashMap +} +impl ToUnicodeMap { + pub fn new() -> Self { + Self::default() + } + /// Create a new ToUnicodeMap from key/value pairs. 
+ /// + /// subject to change + pub fn create(iter: impl Iterator) -> Self { + ToUnicodeMap { inner: iter.collect() } + } + pub fn get(&self, gid: u16) -> Option<&str> { + self.inner.get(&gid).map(|s| s.as_str()) + } + pub fn insert(&mut self, gid: u16, unicode: SmallString) { + self.inner.insert(gid, unicode); + } + pub fn iter(&self) -> impl Iterator { + self.inner.iter().map(|(&gid, unicode)| (gid, unicode.as_str())) + } + pub fn len(&self) -> usize { + self.inner.len() + } + pub fn is_empty(&self) -> bool { + self.inner.is_empty() + } +} + +/// helper function to decode UTF-16-BE data +/// takes a slice of u8 and returns an iterator for char or an decoding error +pub fn utf16be_to_char( + data: &[u8], +) -> impl Iterator> + '_ { + char::decode_utf16(data.chunks_exact(2).map(|w| u16::from_be_bytes([w[0], w[1]]))) +} +/// converts UTF16-BE to a string replacing illegal/unknown characters +pub fn utf16be_to_string_lossy(data: &[u8]) -> String { + utf16be_to_char(data) + .map(|r| r.unwrap_or(std::char::REPLACEMENT_CHARACTER)) + .collect() +} +/// converts UTF16-BE to a string errors out in illegal/unknonw characters +pub fn utf16be_to_string(data: &[u8]) -> pdf::error::Result { + utf16be_to_char(data) + .map(|r| r.map_err(|_| PdfError::Utf16Decode)) + .collect() +} +fn parse_cid(s: &PdfString) -> Result { + let b = s.as_bytes(); + match b.len() { + 2 => Ok(u16::from_be_bytes(b.try_into().unwrap())), + 1 => Ok(b[0] as u16), + _ => Err(PdfError::CidDecode), + } +} +fn parse_cmap(data: &[u8]) -> Result { + let mut lexer = Lexer::new(data); + let mut map = ToUnicodeMap::new(); + while let Ok(substr) = lexer.next() { + match substr.as_slice() { + b"beginbfchar" => loop { + let a = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::STRING); + if a.is_err() { + break; + } + let b = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::STRING); + match (a, b) { + (Ok(Primitive::String(cid_data)), Ok(Primitive::String(unicode_data))) => { + let cid = parse_cid(&cid_data)?; + let bytes = unicode_data.as_bytes(); + match utf16be_to_string(bytes) { + Ok(unicode) => map.insert(cid, unicode), + Err(_) => warn!("invalid unicode for cid {cid} {bytes:?}"), + } + } + _ => break, + } + }, + b"beginbfrange" => loop { + let a = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::STRING); + if a.is_err() { + break; + } + let b = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::STRING); + let c = parse_with_lexer(&mut lexer, &NoResolve, ParseFlags::STRING | ParseFlags::ARRAY); + match (a, b, c) { + ( + Ok(Primitive::String(cid_start_data)), + Ok(Primitive::String(cid_end_data)), + Ok(Primitive::String(unicode_data)), + ) if unicode_data.data.len() > 0 => { + let cid_start = parse_cid(&cid_start_data)?; + let cid_end = parse_cid(&cid_end_data)?; + let mut unicode_data = unicode_data.into_bytes(); + + for cid in cid_start..=cid_end { + match utf16be_to_string(&unicode_data) { + Ok(unicode) => map.insert(cid, unicode), + Err(_) => warn!("invalid unicode for cid {cid} {unicode_data:?}"), + } + let last = unicode_data.last_mut().unwrap(); + if *last < 255 { + *last += 1; + } else { + break; + } + } + } + ( + Ok(Primitive::String(cid_start_data)), + Ok(Primitive::String(cid_end_data)), + Ok(Primitive::Array(unicode_data_arr)), + ) => { + let cid_start = parse_cid(&cid_start_data)?; + let cid_end = parse_cid(&cid_end_data)?; + + for (cid, unicode_data) in (cid_start..=cid_end).zip(unicode_data_arr) { + let bytes = unicode_data.as_string()?.as_bytes(); + match utf16be_to_string(bytes) { + Ok(unicode) => 
map.insert(cid, unicode), + Err(_) => warn!("invalid unicode for cid {cid} {bytes:?}"), + } + } + } + _ => break, + } + }, + b"endcmap" => break, + _ => {} + } + } + + Ok(map) +} + +fn write_cid(w: &mut String, cid: u16) { + write!(w, "<{:04X}>", cid).unwrap(); +} +fn write_unicode(out: &mut String, unicode: &str) { + let mut buf = [0; 2]; + write!(out, "<").unwrap(); + for c in unicode.chars() { + let slice = c.encode_utf16(&mut buf); + for &word in slice.iter() { + write!(out, "{:04X}", word).unwrap(); + } + } + write!(out, ">").unwrap(); +} +pub fn write_cmap(map: &ToUnicodeMap) -> String { + let mut buf = String::new(); + let mut list: Vec<(u16, &str)> = map.inner.iter().map(|(&cid, s)| (cid, s.as_str())).collect(); + list.sort(); + + + let mut remaining = &list[..]; + let blocks = std::iter::from_fn(move || { + if remaining.len() == 0 { + return None; + } + let first_cid = remaining[0].0; + let seq_len = remaining.iter().enumerate().take_while(|&(i, &(cid, _))| cid == first_cid + i as u16).count(); + + let (block, tail) = remaining.split_at(seq_len); + remaining = tail; + Some(block) + }); + + for (single, group) in &blocks.group_by(|b| b.len() == 1) { + if single { + writeln!(buf, "beginbfchar").unwrap(); + for block in group { + for &(cid, uni) in block { + write_cid(&mut buf, cid); + write!(buf, " ").unwrap(); + write_unicode(&mut buf, uni); + writeln!(buf).unwrap(); + } + } + writeln!(buf, "endbfchar").unwrap(); + } else { + writeln!(buf, "beginbfrange").unwrap(); + for block in group { + write_cid(&mut buf, block[0].0); + write!(buf, " ").unwrap(); + write_cid(&mut buf, block.last().unwrap().0); + write!(buf, " [").unwrap(); + for (i, &(_cid, u)) in block.iter().enumerate() { + if i > 0 { + write!(buf, ", ").unwrap(); + } + write_unicode(&mut buf, u); + } + writeln!(buf, "]").unwrap(); + } + writeln!(buf, "endbfrange").unwrap(); + } + } + + buf +} + +#[cfg(test)] +mod tests { + + use crate::font::{utf16be_to_string, utf16be_to_char, utf16be_to_string_lossy}; + #[test] + fn utf16be_to_string_quick() { + let v = vec![0x20, 0x09]; + let s = utf16be_to_string(&v); + assert_eq!(s.unwrap(), "\u{2009}"); + assert!(!v.is_empty()); + } + + #[test] + fn test_to_char() { + // 𝄞music + let v = [ + 0xD8, 0x34, 0xDD, 0x1E, 0x00, 0x6d, 0x00, 0x75, 0x00, 0x73, 0xDD, 0x1E, 0x00, 0x69, 0x00, + 0x63, 0xD8, 0x34, + ]; + + assert_eq!( + utf16be_to_char(&v) + .map(|r| r.map_err(|e| e.unpaired_surrogate())) + .collect::>(), + vec![ + Ok('𝄞'), + Ok('m'), + Ok('u'), + Ok('s'), + Err(0xDD1E), + Ok('i'), + Ok('c'), + Err(0xD834) + ] + ); + + let mut lossy = String::from("𝄞mus"); + lossy.push(std::char::REPLACEMENT_CHARACTER); + lossy.push('i'); + lossy.push('c'); + lossy.push(std::char::REPLACEMENT_CHARACTER); + + let r = utf16be_to_string(&v); + if let Err(r) = r { + // FIXME: compare against PdfError::Utf16Decode variant + assert_eq!(r.to_string(), "UTF16 decode error"); + } + assert_eq!(utf16be_to_string(&v[..8]).unwrap(), String::from("𝄞mu")); + assert_eq!(utf16be_to_string_lossy(&v), lossy); + } +} diff --git a/src-pdfrs/pdf/src/lib.rs b/src-pdfrs/pdf/src/lib.rs new file mode 100644 index 0000000..43f2fda --- /dev/null +++ b/src-pdfrs/pdf/src/lib.rs @@ -0,0 +1,28 @@ +#![allow(non_camel_case_types)] /* TODO temporary becaues of pdf_derive */ +#![allow(unused_doc_comments)] // /* TODO temporary because of err.rs */ +#![allow(clippy::len_zero, clippy::should_implement_trait, clippy::manual_map, clippy::from_over_into)] + +#[macro_use] extern crate pdf_derive; +#[macro_use] extern crate snafu; 
+#[macro_use] extern crate log; + +#[macro_use] +pub mod error; +pub mod object; +pub mod xref; +pub mod primitive; +pub mod file; +pub mod backend; +pub mod content; +pub mod parser; +pub mod font; +pub mod any; +pub mod encoding; +pub mod build; + +// mod content; +pub mod enc; +pub mod crypt; + +// pub use content::*; +pub use crate::error::PdfError; diff --git a/src-pdfrs/pdf/src/macros.rs b/src-pdfrs/pdf/src/macros.rs new file mode 100644 index 0000000..4ff8dcd --- /dev/null +++ b/src-pdfrs/pdf/src/macros.rs @@ -0,0 +1,49 @@ +macro_rules! write_entry { + ($out:expr, $key:tt, $val:expr) => { + { + $out.write(b" ")?; + $key.serialize($out)?; + $out.write(b" ")?; + $val.serialize($out)?; + $out.write(b"\n")?; + } + } +} +macro_rules! write_entrys { + ($out:expr, $key:tt << $val:expr $(,)*) => { + write_entry!($out, $key, $val); + }; + ($out:expr, $key:tt << $val:expr, $($rest:tt)*) => { + { + write_entry!($out, $key, $val); + write_entrys!($out, $($rest)*); + } + }; + ($out:expr, $key:tt ? << $val:expr $(,)*) => { + match &$val { + &Some(ref v) => write_entry!($out, $key, v), + &None => {} + } + }; + ($out:expr, $key:tt ? << $val:expr, $($rest:tt)*) => { + { + match &$val { + &Some(ref v) => write_entry!($out, $key, v), + &None => {} + } + write_entrys!($out, $($rest)*); + } + } +} + +macro_rules! write_dict { + ($out:expr, $($rest:tt)*) => { + { + write!($out, "<<\n")?; + write_entrys!($out, $($rest)*); + write!($out, ">>")?; + } + }; +} + + diff --git a/src-pdfrs/pdf/src/object/color.rs b/src-pdfrs/pdf/src/object/color.rs new file mode 100644 index 0000000..b63c917 --- /dev/null +++ b/src-pdfrs/pdf/src/object/color.rs @@ -0,0 +1,180 @@ +use datasize::DataSize; +use crate as pdf; +use crate::object::*; +use crate::error::*; + +#[derive(Object, Debug, DataSize, DeepClone, ObjectWrite)] +pub struct IccInfo { + #[pdf(key="N")] + pub components: u32, + + #[pdf(key="Alternate")] + pub alternate: Option>, + + #[pdf(key="Range")] + pub range: Option>, + + #[pdf(key="Metadata")] + pub metadata: Option>, +} + +#[derive(Debug, Clone, DeepClone)] +pub enum ColorSpace { + DeviceGray, + DeviceRGB, + DeviceCMYK, + DeviceN { names: Vec, alt: Box, tint: Function, attr: Option }, + CalGray(Dictionary), + CalRGB(Dictionary), + CalCMYK(Dictionary), + Indexed(Box, u8, Arc<[u8]>), + Separation(Name, Box, Function), + Icc(RcRef>), + Pattern, + Named(Name), + Other(Vec) +} +impl DataSize for ColorSpace { + const IS_DYNAMIC: bool = true; + const STATIC_HEAP_SIZE: usize = 0; + + #[inline] + fn estimate_heap_size(&self) -> usize { + match *self { + ColorSpace::DeviceGray | ColorSpace::DeviceRGB | ColorSpace::DeviceCMYK => 0, + ColorSpace::DeviceN { ref names, ref alt, ref tint, ref attr } => { + names.estimate_heap_size() + + alt.estimate_heap_size() + + tint.estimate_heap_size() + + attr.estimate_heap_size() + } + ColorSpace::CalGray(ref d) | ColorSpace::CalRGB(ref d) | ColorSpace::CalCMYK(ref d) => { + d.estimate_heap_size() + } + ColorSpace::Indexed(ref cs, _, ref data) => { + cs.estimate_heap_size() + data.estimate_heap_size() + } + ColorSpace::Separation(ref name, ref cs, ref f) => { + name.estimate_heap_size() + cs.estimate_heap_size() + f.estimate_heap_size() + } + ColorSpace::Icc(ref s) => s.estimate_heap_size(), + ColorSpace::Pattern => 0, + ColorSpace::Other(ref v) => v.estimate_heap_size(), + ColorSpace::Named(ref n) => n.estimate_heap_size() + } + } +} + +fn get_index(arr: &[Primitive], idx: usize) -> Result<&Primitive> { + arr.get(idx).ok_or(PdfError::Bounds { index: idx, len: arr.len() }) +} + 
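+// Editorial sketch, not part of the upstream pdf-rs source: the parser below
+// accepts either a bare name (/DeviceGray, /DeviceRGB, /DeviceCMYK, /Pattern,
+// or any other name, which becomes ColorSpace::Named) or an array whose first
+// element names the colour space family, e.g.
+//
+//     [/Indexed base hival lookup]            -> ColorSpace::Indexed
+//     [/Separation name alternate tint]       -> ColorSpace::Separation
+//     [/ICCBased stream]                      -> ColorSpace::Icc
+//     [/DeviceN names alternate tint attrs?]  -> ColorSpace::DeviceN
+//     [/CalGray dict], [/CalRGB dict], ...    -> ColorSpace::CalGray, CalRGB, ...
+//
+// Recursion into base/alternate spaces is capped (depth 5 at the entry point),
+// so self-referential colour space definitions cannot loop forever.
+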
+impl Object for ColorSpace { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + ColorSpace::from_primitive_depth(p, resolve, 5) + } +} +impl ColorSpace { + fn from_primitive_depth(p: Primitive, resolve: &impl Resolve, depth: usize) -> Result { + let p = p.resolve(resolve)?; + + if let Ok(name) = p.as_name() { + let cs = match name { + "DeviceGray" => ColorSpace::DeviceGray, + "DeviceRGB" => ColorSpace::DeviceRGB, + "DeviceCMYK" => ColorSpace::DeviceCMYK, + "Pattern" => ColorSpace::Pattern, + name => ColorSpace::Named(name.into()), + }; + return Ok(cs); + } + let arr = t!(p.into_array()); + let typ_p = t!(get_index(&arr, 0)).clone().resolve(resolve)?; + let typ = t!(typ_p.as_name()); + + if depth == 0 { + bail!("ColorSpace base recursion"); + } + match typ { + "Indexed" => { + let base = Box::new(t!(ColorSpace::from_primitive_depth(t!(get_index(&arr, 1)).clone(), resolve, depth-1))); + let hival = t!(t!(get_index(&arr, 2)).as_u8()); + let lookup = match t!(get_index(&arr, 3)) { + &Primitive::Reference(r) => resolve.resolve(r)?, + p => p.clone() + }; + let lookup = match lookup { + Primitive::String(string) => { + let data: Vec = string.into_bytes().into(); + data.into() + } + Primitive::Stream(stream) => { + let s: Stream::<()> = Stream::from_stream(stream, resolve)?; + t!(s.data(resolve)) + }, + p => return Err(PdfError::UnexpectedPrimitive { + expected: "String or Stream", + found: p.get_debug_name() + }) + }; + Ok(ColorSpace::Indexed(base, hival, lookup)) + } + "Separation" => { + let name = t!(t!(get_index(&arr, 1)).clone().into_name()); + let alternate = Box::new(t!(ColorSpace::from_primitive_depth(t!(get_index(&arr, 2)).clone(), resolve, depth-1))); + let tint = t!(Function::from_primitive(t!(get_index(&arr, 3)).clone(), resolve)); + Ok(ColorSpace::Separation(name, alternate, tint)) + } + "ICCBased" => { + let s = t!(RcRef::from_primitive(t!(get_index(&arr, 1)).clone(), resolve)); + Ok(ColorSpace::Icc(s)) + } + "DeviceN" => { + let names = t!(Object::from_primitive(t!(get_index(&arr, 1)).clone(), resolve)); + let alt = t!(Object::from_primitive(t!(get_index(&arr, 2)).clone(), resolve)); + let tint = t!(Function::from_primitive(t!(get_index(&arr, 3)).clone(), resolve)); + let attr = arr.get(4).map(|p| Dictionary::from_primitive(p.clone(), resolve)).transpose()?; + + Ok(ColorSpace::DeviceN { names, alt, tint, attr}) + } + "CalGray" => { + let dict = Dictionary::from_primitive(t!(get_index(&arr, 1)).clone(), resolve)?; + Ok(ColorSpace::CalGray(dict)) + } + "CalRGB" => { + let dict = Dictionary::from_primitive(t!(get_index(&arr, 1)).clone(), resolve)?; + Ok(ColorSpace::CalRGB(dict)) + } + "CalCMYK" => { + let dict = Dictionary::from_primitive(t!(get_index(&arr, 1)).clone(), resolve)?; + Ok(ColorSpace::CalCMYK(dict)) + } + "Pattern" => { + Ok(ColorSpace::Pattern) + } + _ => Ok(ColorSpace::Other(arr)) + } + } +} +impl ObjectWrite for ColorSpace { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match *self { + ColorSpace::DeviceCMYK => Ok(Primitive::name("DeviceCMYK")), + ColorSpace::DeviceRGB => Ok(Primitive::name("DeviceRGB")), + ColorSpace::Indexed(ref base, hival, ref lookup) => { + let base = base.to_primitive(update)?; + let hival = Primitive::Integer(hival.into()); + let lookup = if lookup.len() < 100 { + PdfString::new((**lookup).into()).into() + } else { + Stream::new((), lookup.clone()).to_primitive(update)? 
+ }; + Ok(Primitive::Array(vec![Primitive::name("Indexed"), base, hival, lookup])) + } + ref p => { + dbg!(p); + unimplemented!() + } + } + } +} diff --git a/src-pdfrs/pdf/src/object/function.rs b/src-pdfrs/pdf/src/object/function.rs new file mode 100644 index 0000000..7807665 --- /dev/null +++ b/src-pdfrs/pdf/src/object/function.rs @@ -0,0 +1,477 @@ +use crate as pdf; +use crate::object::*; +use crate::error::*; +use itertools::izip; +use datasize::DataSize; + +#[derive(Object, Debug, Clone, ObjectWrite)] +struct RawFunction { + #[pdf(key="FunctionType")] + function_type: u32, + + #[pdf(key="Domain")] + domain: Vec, + + #[pdf(key="Range")] + range: Option>, + + #[pdf(key="Size")] + size: Option>, + + #[pdf(key="BitsPerSample")] + _bits_per_sample: Option, + + #[pdf(key="Order", default="1")] + order: u32, + + #[pdf(key="Encode")] + encode: Option>, + + #[pdf(key="Decode")] + decode: Option>, + + #[pdf(other)] + other: Dictionary +} + +#[derive(Object, Debug, Clone)] +struct Function2 { + #[pdf(key="C0")] + c0: Option>, + + #[pdf(key="C1")] + c1: Option>, + + #[pdf(key="N")] + exponent: f32, +} + +#[derive(Debug, Clone, DataSize)] +pub enum Function { + Sampled(SampledFunction), + Interpolated(Vec), + Stiching, + Calculator, + PostScript { func: PsFunc, domain: Vec, range: Vec }, +} +impl Function { + pub fn apply(&self, x: &[f32], out: &mut [f32]) -> Result<()> { + match *self { + Function::Sampled(ref func) => { + func.apply(x, out) + } + Function::Interpolated(ref parts) => { + if parts.len() != out.len() { + bail!("incorrect output length: expected {}, found {}.", parts.len(), out.len()) + } + for (f, y) in parts.iter().zip(out) { + *y = f.apply(x[0]); + } + Ok(()) + } + Function::PostScript { ref func, .. } => func.exec(x, out), + _ => bail!("unimplemted function {:?}", self) + } + } + pub fn input_dim(&self) -> usize { + match *self { + Function::PostScript { ref domain, .. } => domain.len() / 2, + Function::Sampled(ref f) => f.input.len(), + _ => panic!() + } + } + pub fn output_dim(&self) -> usize { + match *self { + Function::PostScript { ref range, .. } => range.len() / 2, + Function::Sampled(ref f) => f.output.len(), + _ => panic!() + } + } +} +impl FromDict for Function { + fn from_dict(dict: Dictionary, resolve: &impl Resolve) -> Result { + use std::f32::INFINITY; + let raw = RawFunction::from_dict(dict, resolve)?; + match raw.function_type { + 2 => { + let f2 = Function2::from_dict(raw.other, resolve)?; + + let n_dim = match (raw.range.as_ref(), f2.c0.as_ref(), f2.c1.as_ref()) { + (Some(range), _, _) => range.len() / 2, + (_, Some(c0), _) => c0.len(), + (_, _, Some(c1)) => c1.len(), + _ => bail!("unknown dimensions") + }; + let mut parts = Vec::with_capacity(n_dim); + let input_range = (raw.domain[0], raw.domain[1]); + for dim in 0 .. 
n_dim { + let output_range = ( + raw.range.as_ref().and_then(|r| r.get(2*dim).cloned()).unwrap_or(-INFINITY), + raw.range.as_ref().and_then(|r| r.get(2*dim+1).cloned()).unwrap_or(INFINITY) + ); + let c0 = f2.c0.as_ref().and_then(|c0| c0.get(dim).cloned()).unwrap_or(0.0); + let c1 = f2.c1.as_ref().and_then(|c1| c1.get(dim).cloned()).unwrap_or(1.0); + let exponent = f2.exponent; + parts.push(InterpolatedFunctionDim { + input_range, output_range, c0, c1, exponent + }); + } + Ok(Function::Interpolated(parts)) + }, + i => { + dbg!(raw); + bail!("unsupported function type {}", i) + } + } + } +} +impl Object for Function { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Dictionary(dict) => Self::from_dict(dict, resolve), + Primitive::Stream(s) => { + let stream = Stream::::from_stream(s, resolve)?; + let data = stream.data(resolve)?; + match stream.info.function_type { + 4 => { + let s = std::str::from_utf8(&data)?; + let func = PsFunc::parse(s)?; + let info = stream.info.info; + Ok(Function::PostScript { func, domain: info.domain, range: info.range.unwrap() }) + }, + 0 => { + let info = stream.info.info; + let order = match info.order { + 1 => Interpolation::Linear, + 3 => Interpolation::Cubic, + n => bail!("Invalid interpolation order {}", n), + }; + + let size = try_opt!(info.size); + let range = try_opt!(info.range); + let encode = info.encode.unwrap_or_else(|| size.iter().flat_map(|&n| [0.0, (n-1) as f32]).collect()); + let decode = info.decode.unwrap_or_else(|| range.clone()); + + Ok(Function::Sampled(SampledFunction { + input: izip!(info.domain.chunks_exact(2), encode.chunks_exact(2), size.iter()).map(|(c, e, &s)| { + SampledFunctionInput { + domain: (c[0], c[1]), + encode_offset: e[0], + encode_scale: e[1], + size: s as usize, + } + }).collect(), + output: decode.chunks_exact(2).map(|c| SampledFunctionOutput { + offset: c[0], + scale: (c[1] - c[0]) / 255., + }).collect(), + data, + order, + range, + })) + } + ref p => bail!("found a function stream with type {:?}", p) + } + }, + Primitive::Reference(r) => Self::from_primitive(resolve.resolve(r)?, resolve), + _ => bail!("double indirection") + } + } +} +impl ObjectWrite for Function { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + unimplemented!() + /* + let dict = match self { + Function::Interpolated(parts) => { + let first: &InterpolatedFunctionDim = try_opt!(parts.get(0)); + let f2 = Function2 { + c0: parts.iter().map(|p| p.c0).collect(), + c1: parts.iter().map(|p| p.c0).collect(), + exponent: first.exponent + }; + let f = RawFunction { + function_type: 2, + domain: vec![first.input_range.0, first.input_range.1], + range: parts.iter().flat_map(|p| [p.output_range.0, p.output_range.1]).collect(), + decode: None, + encode: None, + order + }; + + } + } + */ + } +} +impl DeepClone for Function { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok(self.clone()) + } +} + +#[derive(Debug, Clone, DataSize)] +struct SampledFunctionInput { + domain: (f32, f32), + encode_offset: f32, + encode_scale: f32, + size: usize, +} +impl SampledFunctionInput { + fn map(&self, x: f32) -> (usize, usize, f32) { + let x = x.clamp(self.domain.0, self.domain.1); + let y = x.mul_add(self.encode_scale, self.encode_offset); + (y.floor() as usize, self.size, y.fract()) + } +} + +#[derive(Debug, Clone, DataSize)] +struct SampledFunctionOutput { + offset: f32, + scale: f32 +} +impl SampledFunctionOutput { + fn map(&self, x: f32) -> f32 { + x.mul_add(self.scale, self.offset) + } +} + 
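+// Editorial sketch, not part of the upstream source: for a sampled (Type 0)
+// function, each input is mapped by SampledFunctionInput::map to an integer
+// sample index plus a fractional part, and neighbouring samples are blended
+// linearly. In the one-dimensional case handled below this amounts to roughly
+//
+//     let (i, _, s) = input.map(x);                 // index and fraction
+//     out[k] = data[i*n_out + k] as f32 * (1.0 - s)
+//            + data[(i + 1)*n_out + k] as f32 * s;
+//
+// after which SampledFunctionOutput::map rescales the raw byte value into the
+// declared Decode range via x.mul_add(scale, offset).
+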
+#[derive(Debug, Clone, DataSize)] +enum Interpolation { + Linear, + #[allow(dead_code)] // TODO + Cubic, +} + +#[derive(Debug, Clone, DataSize)] +pub struct SampledFunction { + input: Vec, + output: Vec, + data: Arc<[u8]>, + order: Interpolation, + range: Vec, +} +impl SampledFunction { + fn apply(&self, x: &[f32], out: &mut [f32]) -> Result<()> { + if x.len() != self.input.len() { + bail!("input dimension mismatch {} != {}", x.len(), self.input.len()); + } + let n_out = out.len(); + if out.len() * 2 != self.range.len() { + bail!("output dimension mismatch 2 * {} != {}", out.len(), self.range.len()) + } + match x.len() { + 1 => { + match self.order { + Interpolation::Linear => { + let (i, _, s) = self.input[0].map(x[0]); + let idx = i * n_out; + + for (o, &a) in out.iter_mut().zip(&self.data[idx..]) { + *o = a as f32 * (1. - s); + } + for (o, &b) in out.iter_mut().zip(&self.data[idx + n_out..]) { + *o += b as f32 * s; + } + } + _ => unimplemented!() + } + } + 2 => match self.order { + Interpolation::Linear => { + let (i0, s0, f0) = self.input[0].map(x[0]); + let (i1, _, f1) = self.input[1].map(x[1]); + let (j0, j1) = (i0+1, i1+1); + let (g0, g1) = (1. - f0, 1. - f1); + + out.fill(0.0); + let mut add = |i0, i1, f| { + let idx = (i0 + s0 * i1) * n_out; + + if let Some(part) = self.data.get(idx .. idx+n_out) { + for (o, &b) in out.iter_mut().zip(part) { + *o += f * b as f32; + } + } + }; + + add(i0, i1, g0 * g1); + add(j0, i1, f0 * g1); + add(i0, j1, g0 * f1); + add(j0, j1, f0 * f1); + } + _ => unimplemented!() + } + 3 => match self.order { + Interpolation::Linear => { + let (i0, s0, f0) = self.input[0].map(x[0]); + let (i1, s1, f1) = self.input[1].map(x[1]); + let (i2, _, f2) = self.input[2].map(x[2]); + let (j0, j1, j2) = (i0+1, i1+1, i2+1); + let (g0, g1, g2) = (1. - f0, 1. - f1, 1. - f2); + + out.fill(0.0); + let mut add = |i0, i1, i2, f| { + let idx = (i0 + s0 * (i1 + s1 * i2)) * n_out; + + if let Some(part) = self.data.get(idx .. idx+n_out) { + for (o, &b) in out.iter_mut().zip(part) { + *o += f * b as f32; + } + } + }; + + add(i0, i1, i2, g0 * g1 * g2); + add(j0, i1, i2, f0 * g1 * g2); + add(i0, j1, i2, g0 * f1 * g2); + add(j0, j1, i2, f0 * f1 * g2); + + add(i0, i1, j2, g0 * g1 * f2); + add(j0, i1, j2, f0 * g1 * f2); + add(i0, j1, j2, g0 * f1 * f2); + add(j0, j1, j2, f0 * f1 * f2); + } + _ => unimplemented!() + } + n => bail!("Order {}", n) + } + for (o, y) in self.output.iter().zip(out.iter_mut()) { + *y = o.map(*y); + } + Ok(()) + } +} + + +#[derive(Debug, Clone, DataSize)] +pub struct InterpolatedFunctionDim { + pub input_range: (f32, f32), + pub output_range: (f32, f32), + pub c0: f32, + pub c1: f32, + pub exponent: f32, +} +impl InterpolatedFunctionDim { + pub fn apply(&self, x: f32) -> f32 { + let y = self.c0 + x.powf(self.exponent) * (self.c1 - self.c0); + let (y0, y1) = self.output_range; + y.min(y1).max(y0) + } +} + +#[derive(Debug)] +pub enum PostScriptError { + StackUnderflow, + IncorrectStackSize +} +#[derive(Debug, Clone, DataSize)] +pub struct PsFunc { + pub ops: Vec +} + +macro_rules! 
op { + ($stack:ident; $($v:ident),* => $($e:expr),*) => ( { + $(let $v = $stack.pop().ok_or(PostScriptError::StackUnderflow)?;)* + $($stack.push($e);)* + } ) +} + +impl PsFunc { + fn exec_inner(&self, stack: &mut Vec) -> Result<(), PostScriptError> { + for &op in &self.ops { + match op { + PsOp::Int(i) => stack.push(i as f32), + PsOp::Value(v) => stack.push(v), + PsOp::Dup => op!(stack; v => v, v), + PsOp::Exch => op!(stack; b, a => b, a), + PsOp::Add => op!(stack; b, a => a + b), + PsOp::Sub => op!(stack; b, a => a - b), + PsOp::Mul => op!(stack; b, a => a * b), + PsOp::Abs => op!(stack; a => a.abs()), + PsOp::Roll => { + let j = stack.pop().ok_or(PostScriptError::StackUnderflow)? as isize; + let n = stack.pop().ok_or(PostScriptError::StackUnderflow)? as usize; + let start = stack.len() - n; + let slice = &mut stack[start..]; + if j > 0 { + slice.rotate_right(j as usize); + } else { + slice.rotate_left(-j as usize); + } + } + PsOp::Index => { + let n = stack.pop().ok_or(PostScriptError::StackUnderflow)? as usize; + if n >= stack.len() { return Err(PostScriptError::StackUnderflow); } + let val = stack[stack.len() - n - 1]; + stack.push(val); + } + PsOp::Cvr => {} + PsOp::Pop => { + stack.pop().ok_or(PostScriptError::StackUnderflow)?; + } + } + } + Ok(()) + } + pub fn exec(&self, input: &[f32], output: &mut [f32]) -> Result<()> { + let mut stack = Vec::with_capacity(10); + stack.extend_from_slice(input); + match self.exec_inner(&mut stack) { + Ok(()) => {}, + Err(_) => return Err(PdfError::PostScriptExec) + } + if output.len() != stack.len() { + bail!("incorrect output length: expected {}, found {}.", stack.len(), output.len()) + } + output.copy_from_slice(&stack); + Ok(()) + } + pub fn parse(s: &str) -> Result { + let start = s.find('{').ok_or(PdfError::PostScriptParse)?; + let end = s.rfind('}').ok_or(PdfError::PostScriptParse)?; + + let ops: Result, _> = s[start + 1 .. end].split_ascii_whitespace().map(PsOp::parse).collect(); + Ok(PsFunc { ops: ops? }) + } +} + +#[derive(Copy, Clone, Debug, DataSize)] +pub enum PsOp { + Int(i32), + Value(f32), + Add, + Sub, + Abs, + Mul, + Dup, + Exch, + Roll, + Index, + Cvr, + Pop, +} +impl PsOp { + pub fn parse(s: &str) -> Result { + if let Ok(i) = s.parse::() { + Ok(PsOp::Int(i)) + } else if let Ok(f) = s.parse::() { + Ok(PsOp::Value(f)) + } else { + Ok(match s { + "add" => PsOp::Add, + "sub" => PsOp::Sub, + "abs" => PsOp::Abs, + "mul" => PsOp::Mul, + "dup" => PsOp::Dup, + "exch" => PsOp::Exch, + "roll" => PsOp::Roll, + "index" => PsOp::Index, + "cvr" => PsOp::Cvr, + "pop" => PsOp::Pop, + _ => { + bail!("unimplemented op {}", s); + } + }) + } + } +} diff --git a/src-pdfrs/pdf/src/object/mod.rs b/src-pdfrs/pdf/src/object/mod.rs new file mode 100644 index 0000000..f7e363d --- /dev/null +++ b/src-pdfrs/pdf/src/object/mod.rs @@ -0,0 +1,854 @@ +//! `Object` trait, along with some implementations. References. +//! +//! Some of the structs are incomplete (missing fields that are in the PDF references). 
+ +mod types; +mod stream; +mod color; +mod function; + +pub use self::types::*; +pub use self::stream::*; +pub use self::color::*; +pub use self::function::*; +pub use crate::file::PromisedRef; +use crate::parser::ParseFlags; + +use crate::primitive::*; +use crate::error::*; +use crate::enc::*; + +use std::fmt; +use std::marker::PhantomData; +use std::collections::HashMap; +use std::sync::Arc; +use std::ops::{Deref, Range}; +use std::hash::{Hash, Hasher}; +use std::convert::TryInto; +use datasize::DataSize; +use itertools::Itertools; +use once_cell::sync::OnceCell; + +pub type ObjNr = u64; +pub type GenNr = u64; + +pub struct ParseOptions { + pub allow_error_in_option: bool, + pub allow_xref_error: bool, + pub allow_invalid_ops: bool, + pub allow_missing_endobj: bool, +} +impl ParseOptions { + pub const fn tolerant() -> Self { + ParseOptions { + allow_error_in_option: true, + allow_xref_error: true, + allow_invalid_ops: true, + allow_missing_endobj: true, + } + } + pub const fn strict() -> Self { + ParseOptions { + allow_error_in_option: false, + allow_xref_error: false, + allow_invalid_ops: true, + allow_missing_endobj: false, + } + } +} + +pub trait Resolve: { + fn resolve_flags(&self, r: PlainRef, flags: ParseFlags, depth: usize) -> Result; + fn resolve(&self, r: PlainRef) -> Result { + self.resolve_flags(r, ParseFlags::ANY, 16) + } + fn get(&self, r: Ref) -> Result>; + fn options(&self) -> &ParseOptions; + fn stream_data(&self, id: PlainRef, range: Range) -> Result>; + fn get_data_or_decode(&self, id: PlainRef, range: Range, filters: &[StreamFilter]) -> Result>; +} + +pub struct NoResolve; +impl Resolve for NoResolve { + fn resolve_flags(&self, _: PlainRef, _: ParseFlags, _: usize) -> Result { + Err(PdfError::Reference) + } + fn get(&self, _r: Ref) -> Result> { + Err(PdfError::Reference) + } + fn options(&self) -> &ParseOptions { + static STRICT: ParseOptions = ParseOptions::strict(); + &STRICT + } + fn get_data_or_decode(&self, _: PlainRef, _: Range, _: &[StreamFilter]) -> Result> { + Err(PdfError::Reference) + } + fn stream_data(&self, id: PlainRef, range: Range) -> Result> { + Err(PdfError::Reference) + } + +} + +/// A PDF Object +pub trait Object: Sized + Sync + Send + 'static { + /// Convert primitive to Self + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result; +} + +pub trait Cloner: Updater + Resolve { + fn clone_plainref(&mut self, old: PlainRef) -> Result; + fn clone_ref(&mut self, old: Ref) -> Result>; + fn clone_rcref(&mut self, old: &RcRef) -> Result>; + fn clone_shared(&mut self, old: &Shared) -> Result>; +} + +pub trait DeepClone: Sized + Sync + Send + 'static { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result; +} + +pub trait Updater { + fn create(&mut self, obj: T) -> Result>; + fn update(&mut self, old: PlainRef, obj: T) -> Result>; + fn promise(&mut self) -> PromisedRef; + fn fulfill(&mut self, promise: PromisedRef, obj: T) -> Result>; +} + +pub struct NoUpdate; +impl Updater for NoUpdate { + fn create(&mut self, _obj: T) -> Result> { panic!() } + fn update(&mut self, _old: PlainRef, _obj: T) -> Result> { panic!() } + fn promise(&mut self) -> PromisedRef { panic!() } + fn fulfill(&mut self, _promise: PromisedRef, _obj: T) -> Result> { panic!() } +} + +pub trait ObjectWrite { + fn to_primitive(&self, update: &mut impl Updater) -> Result; +} + +pub trait FromDict: Sized { + fn from_dict(dict: Dictionary, resolve: &impl Resolve) -> Result; +} +pub trait ToDict: ObjectWrite { + fn to_dict(&self, update: &mut impl Updater) -> Result; +} + +pub 
trait SubType {} + +pub trait Trace { + fn trace(&self, _cb: &mut impl FnMut(PlainRef)) {} +} + +/////// +// Refs +/////// + +// TODO move to primitive.rs +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, DataSize)] +pub struct PlainRef { + pub id: ObjNr, + pub gen: GenNr, +} +impl Object for PlainRef { + fn from_primitive(p: Primitive, _: &impl Resolve) -> Result { + p.into_reference() + } +} +impl ObjectWrite for PlainRef { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Reference(*self)) + } +} +impl DeepClone for PlainRef { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + cloner.clone_plainref(*self) + } +} + +// NOTE: Copy & Clone implemented manually ( https://github.com/rust-lang/rust/issues/26925 ) + +#[derive(DataSize)] +pub struct Ref { + inner: PlainRef, + _marker: PhantomData +} +impl Clone for Ref { + fn clone(&self) -> Ref { + *self + } +} +impl Copy for Ref {} + +impl Ref { + pub fn new(inner: PlainRef) -> Ref { + Ref { + inner, + _marker: PhantomData, + } + } + pub fn from_id(id: ObjNr) -> Ref { + Ref { + inner: PlainRef {id, gen: 0}, + _marker: PhantomData, + } + } + pub fn get_inner(&self) -> PlainRef { + self.inner + } + pub fn upcast(self) -> Ref where T: SubType { + Ref::new(self.inner) + } +} +impl Object for Ref { + fn from_primitive(p: Primitive, _: &impl Resolve) -> Result { + Ok(Ref::new(p.into_reference()?)) + } +} +impl ObjectWrite for Ref { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.inner.to_primitive(update) + } +} +impl DeepClone for Ref { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + cloner.clone_ref(*self) + } +} +impl Trace for Ref { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + cb(self.inner); + } +} +impl fmt::Debug for Ref { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Ref({})", self.inner.id) + } +} +impl Hash for Ref { + fn hash(&self, state: &mut H) { + self.inner.hash(state) + } +} +impl PartialEq for Ref { + fn eq(&self, rhs: &Self) -> bool { + self.inner.eq(&rhs.inner) + } +} +impl Eq for Ref {} + +pub type Shared = Arc; + + +#[derive(Debug, DataSize)] +pub struct RcRef { + inner: PlainRef, + data: Shared +} +impl From> for Primitive { + fn from(value: RcRef) -> Self { + Primitive::Reference(value.inner) + } +} +impl From> for Ref { + fn from(value: RcRef) -> Self { + value.get_ref() + } +} + +impl RcRef { + pub fn new(inner: PlainRef, data: Shared) -> RcRef { + RcRef { inner, data } + } + pub fn get_ref(&self) -> Ref { + Ref::new(self.inner) + } + pub fn data(&self) -> &Shared { + &self.data + } +} +impl Object for RcRef { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Reference(r) => resolve.get(Ref::new(r)), + p => Err(PdfError::UnexpectedPrimitive {expected: "Reference", found: p.get_debug_name()}) + } + } +} +impl ObjectWrite for RcRef { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.inner.to_primitive(update) + } +} +impl DeepClone for RcRef { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + cloner.clone_rcref(self) + } +} + +impl Deref for RcRef { + type Target = T; + fn deref(&self) -> &T { + &self.data + } +} +impl Clone for RcRef { + fn clone(&self) -> RcRef { + RcRef { + inner: self.inner, + data: self.data.clone(), + } + } +} +impl Trace for RcRef { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + cb(self.inner); + } +} +impl<'a, T> From<&'a RcRef> for Ref { + fn from(r: &'a RcRef) -> Ref { + Ref::new(r.inner) + } +} +impl Hash 
for RcRef { + fn hash(&self, state: &mut H) { + std::ptr::hash(&**self, state) + } +} +impl PartialEq for RcRef { + fn eq(&self, rhs: &Self) -> bool { + std::ptr::eq(&**self, &**rhs) + } +} +impl Eq for RcRef {} + +#[derive(Debug, DataSize)] +pub enum MaybeRef { + Direct(Shared), + Indirect(RcRef), +} +impl MaybeRef { + pub fn as_ref(&self) -> Option> { + match *self { + MaybeRef::Indirect(ref r) => Some(r.get_ref()), + _ => None + } + } + pub fn data(&self) -> &Shared { + match *self { + MaybeRef::Direct(ref t) => t, + MaybeRef::Indirect(ref r) => &r.data + } + } +} +impl Object for MaybeRef { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + Ok(match p { + Primitive::Reference(r) => MaybeRef::Indirect(resolve.get(Ref::new(r))?), + p => MaybeRef::Direct(Shared::new(T::from_primitive(p, resolve)?)) + }) + } +} +impl ObjectWrite for MaybeRef { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + MaybeRef::Direct(ref inner) => inner.to_primitive(update), + MaybeRef::Indirect(r) => r.to_primitive(update) + } + } +} +impl DeepClone for MaybeRef { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + match *self { + MaybeRef::Direct(ref old) => cloner.clone_shared(old).map(MaybeRef::Direct), + MaybeRef::Indirect(ref old) => cloner.clone_rcref(old).map(MaybeRef::Indirect) + } + } +} +impl Deref for MaybeRef { + type Target = T; + fn deref(&self) -> &T { + match *self { + MaybeRef::Direct(ref t) => t, + MaybeRef::Indirect(ref r) => r + } + } +} +impl Clone for MaybeRef { + fn clone(&self) -> Self { + match *self { + MaybeRef::Direct(ref rc) => MaybeRef::Direct(rc.clone()), + MaybeRef::Indirect(ref r) => MaybeRef::Indirect(r.clone()) + } + } +} +impl Trace for MaybeRef { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + match *self { + MaybeRef::Indirect(ref rc) => rc.trace(cb), + MaybeRef::Direct(_) => () + } + } +} +impl From> for MaybeRef { + fn from(r: Shared) -> MaybeRef { + MaybeRef::Direct(r) + } +} +impl From for MaybeRef { + fn from(t: T) -> MaybeRef { + MaybeRef::Direct(t.into()) + } +} +impl From> for Shared { + fn from(r: MaybeRef) -> Shared { + match r { + MaybeRef::Direct(rc) => rc, + MaybeRef::Indirect(r) => r.data + } + } +} +impl<'a, T> From<&'a MaybeRef> for Shared { + fn from(r: &'a MaybeRef) -> Shared { + match r { + MaybeRef::Direct(ref rc) => rc.clone(), + MaybeRef::Indirect(ref r) => r.data.clone() + } + } +} +impl From> for MaybeRef { + fn from(r: RcRef) -> MaybeRef { + MaybeRef::Indirect(r) + } +} +impl Hash for MaybeRef { + fn hash(&self, state: &mut H) { + std::ptr::hash(&**self, state) + } +} +impl PartialEq for MaybeRef { + fn eq(&self, rhs: &Self) -> bool { + std::ptr::eq(&**self, &**rhs) + } +} +impl Eq for MaybeRef {} + +#[derive(Debug)] +pub struct Lazy { + primitive: Primitive, + cache: OnceCell>, + _marker: PhantomData +} +impl DataSize for Lazy { + const IS_DYNAMIC: bool = true; + const STATIC_HEAP_SIZE: usize = size_of::(); + fn estimate_heap_size(&self) -> usize { + self.cache.get().map(|value| value.estimate_heap_size()).unwrap_or(0) + size_of::() + } +} +impl Clone for Lazy { + fn clone(&self) -> Self { + Lazy { + primitive: self.primitive.clone(), + cache: self.cache.clone(), + _marker: PhantomData + } + } +} +impl DeepClone for Lazy { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok(Lazy { + primitive: self.primitive.deep_clone(cloner)?, + cache: OnceCell::new(), + _marker: PhantomData + }) + } +} +impl Lazy { + pub fn load(&self, resolve: &impl Resolve) -> Result> { + 
self.cache.get_or_try_init(|| { + match self.primitive { + Primitive::Reference(r) => resolve.get(Ref::new(r)).map(MaybeRef::Indirect), + ref p => T::from_primitive(p.clone(), resolve).map(|o| MaybeRef::Direct(Arc::new(o))), + } + }).cloned() + } +} +impl Object for Lazy { + fn from_primitive(p: Primitive, _: &impl Resolve) -> Result { + Ok(Self { + primitive: p, + cache: OnceCell::new(), + _marker: PhantomData + }) + } +} +impl ObjectWrite for Lazy { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + Ok(self.primitive.clone()) + } +} +impl Default for Lazy { + fn default() -> Self { + Lazy { + primitive: Primitive::Null, + cache: OnceCell::new(), + _marker: PhantomData + } + } +} +impl From> for Lazy { + fn from(value: RcRef) -> Self { + Lazy { + primitive: Primitive::Reference(value.inner), + cache: OnceCell::with_value(MaybeRef::Direct(value.data)), + _marker: PhantomData + } + } +} + +////////////////////////////////////// +// Object for Primitives & other types +////////////////////////////////////// + +impl Object for i32 { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Reference(id) => r.resolve(id)?.as_integer(), + p => p.as_integer() + } + } +} +impl ObjectWrite for i32 { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Integer(*self)) + } +} + +impl Object for u32 { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Reference(id) => r.resolve(id)?.as_u32(), + p => p.as_u32() + } + } +} +impl ObjectWrite for u32 { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Integer(*self as _)) + } +} + +impl Object for usize { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Reference(id) => Ok(r.resolve(id)?.as_u32()? as usize), + p => Ok(p.as_u32()? as usize) + } + } +} +impl ObjectWrite for usize { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Integer(*self as _)) + } +} + +impl Object for f32 { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Reference(id) => r.resolve(id)?.as_number(), + p => p.as_number() + } + } +} +impl ObjectWrite for f32 { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Number(*self)) + } +} + +impl Object for bool { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Reference(id) => r.resolve(id)?.as_bool(), + p => p.as_bool() + } + } +} +impl ObjectWrite for bool { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Boolean(*self)) + } +} + +impl Object for Dictionary { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Dictionary(dict) => Ok(dict), + Primitive::Reference(id) => Dictionary::from_primitive(r.resolve(id)?, r), + _ => Err(PdfError::UnexpectedPrimitive {expected: "Dictionary", found: p.get_debug_name()}), + } + } +} + +impl Object for Name { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + p.resolve(resolve)?.into_name() + } +} +impl ObjectWrite for Name { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Name(self.0.clone())) + } +} + +impl Object for Vec { + /// Will try to convert `p` to `T` first, then try to convert `p` to Vec + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + Ok( + match p { + Primitive::Array(_) => { + p.resolve(r)?.into_array()? 
+ .into_iter() + .map(|p| T::from_primitive(p, r)) + .collect::>>()? + }, + Primitive::Null => { + Vec::new() + } + Primitive::Reference(id) => Self::from_primitive(r.resolve(id)?, r)?, + _ => vec![T::from_primitive(p, r)?] + } + ) + } +} +impl ObjectWrite for Vec { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + Primitive::array::(self.iter(), update) + } +} +impl DeepClone for Vec { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + self.iter().map(|t| t.deep_clone(cloner)).collect() + } +} +impl Trace for Vec { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + for i in self.iter() { + i.trace(cb); + } + } +} +/* +pub struct Data(pub Vec); +impl Object for Data { + fn serialize(&self, out: &mut W) -> Result<()> { + unimplemented!() + } + /// Will try to convert `p` to `T` first, then try to convert `p` to Vec + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::Array(_) => { + p.into_array(r)? + .into_iter() + .map(|p| u8::from_primitive(p, r)) + .collect::>>()? + }, + Primitive::Null => { + Vec::new() + } + Primitive::Reference(id) => Self::from_primitive(r.resolve(id)?, r)?, + _ => + } + } +}*/ + +impl Object for Primitive { + fn from_primitive(p: Primitive, _: &impl Resolve) -> Result { + Ok(p) + } +} +impl ObjectWrite for Primitive { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(self.clone()) + } +} +impl DeepClone for Primitive { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + match *self { + Primitive::Array(ref parts) => Ok(Primitive::Array(parts.into_iter().map(|p| p.deep_clone(cloner)).try_collect()?)), + Primitive::Boolean(b) => Ok(Primitive::Boolean(b)), + Primitive::Dictionary(ref dict) => Ok(Primitive::Dictionary(dict.deep_clone(cloner)?)), + Primitive::Integer(i) => Ok(Primitive::Integer(i)), + Primitive::Name(ref name) => Ok(Primitive::Name(name.clone())), + Primitive::Null => Ok(Primitive::Null), + Primitive::Number(n) => Ok(Primitive::Number(n)), + Primitive::Reference(r) => Ok(Primitive::Reference(r.deep_clone(cloner)?)), + Primitive::Stream(ref s) => Ok(Primitive::Stream(s.deep_clone(cloner)?)), + Primitive::String(ref s) => Ok(Primitive::String(s.clone())) + } + } +} + +impl Trace for Primitive { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + match *self { + Primitive::Reference(r) => cb(r), + Primitive::Array(ref parts) => parts.iter().for_each(|p| p.trace(cb)), + Primitive::Dictionary(ref dict) => dict.values().for_each(|p| p.trace(cb)), + _ => () + } + } +} + +impl Object for HashMap { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Null => Ok(HashMap::new()), + Primitive::Dictionary (dict) => { + let mut new = Self::new(); + for (key, val) in dict.iter() { + new.insert(key.clone(), V::from_primitive(val.clone(), resolve)?); + } + Ok(new) + } + Primitive::Reference (id) => HashMap::from_primitive(resolve.resolve(id)?, resolve), + p => Err(PdfError::UnexpectedPrimitive {expected: "Dictionary", found: p.get_debug_name()}) + } + } +} +impl ObjectWrite for HashMap { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + if self.is_empty() { + Ok(Primitive::Null) + } else { + let mut dict = Dictionary::new(); + for (k, v) in self.iter() { + dict.insert(k.clone(), v.to_primitive(update)?); + } + Ok(Primitive::Dictionary(dict)) + } + } +} +impl DeepClone for HashMap { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + self.iter().map(|(k, v)| Ok((k.clone(), v.deep_clone(cloner)?))).collect() + } 
+} + +impl Object for Option { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Null => Ok(None), + p => match T::from_primitive(p, resolve) { + Ok(p) => Ok(Some(p)), + // References to non-existing objects ought not to be an error + Err(PdfError::NullRef {..}) => Ok(None), + Err(PdfError::FreeObject {..}) => Ok(None), + Err(e) if resolve.options().allow_error_in_option => { + warn!("ignoring {:?}", e); + Ok(None) + } + Err(e) => Err(e) + } + } + } +} +impl ObjectWrite for Option { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + None => Ok(Primitive::Null), + Some(t) => t.to_primitive(update) + } + } +} +impl DeepClone for Option { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + match self { + None => Ok(None), + Some(t) => t.deep_clone(cloner).map(Some) + } + } +} + +impl Trace for Option { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + if let Some(ref t) = *self { + t.trace(cb) + } + } +} + +impl Object for Box { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + T::from_primitive(p, resolve).map(Box::new) + } +} +impl ObjectWrite for Box { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + (**self).to_primitive(update) + } +} +impl Trace for Box { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + (**self).trace(cb) + } +} + +impl Object for () { + fn from_primitive(_p: Primitive, _resolve: &impl Resolve) -> Result { + Ok(()) + } +} +impl ObjectWrite for () { + fn to_primitive(&self, _: &mut impl Updater) -> Result { + Ok(Primitive::Null) + } +} +impl Trace for () {} + +impl Object for (T, U) where T: Object, U: Object { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let arr = p.resolve(resolve)?.into_array()?; + if arr.len() != 2 { + bail!("expected array of length 2 (found {})", arr.len()); + } + let [a, b]: [Primitive; 2] = arr.try_into().unwrap(); + Ok((T::from_primitive(a, resolve)?, U::from_primitive(b, resolve)?)) + } +} + +impl ObjectWrite for (T, U) where T: ObjectWrite, U: ObjectWrite { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + Ok(Primitive::Array(vec![self.0.to_primitive(update)?, self.1.to_primitive(update)?])) + } +} + +impl Trace for (T, U) { + fn trace(&self, cb: &mut impl FnMut(PlainRef)) { + self.0.trace(cb); + self.1.trace(cb); + } +} + +impl DeepClone for Box { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok(Box::new((&**self).deep_clone(cloner)?)) + } +} +macro_rules! 
deep_clone_simple { + ($($t:ty),*) => ( + $( + impl DeepClone for $t { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok(self.clone()) + } + } + )* + ) +} +deep_clone_simple!(f32, i32, u32, bool, Name, (), Date, PdfString, Rectangle, u8, Arc<[u8]>, Vec); + +impl DeepClone for (A, B) { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok((self.0.deep_clone(cloner)?, self.1.deep_clone(cloner)?)) + } +} diff --git a/src-pdfrs/pdf/src/object/stream.rs b/src-pdfrs/pdf/src/object/stream.rs new file mode 100644 index 0000000..ad6fbc2 --- /dev/null +++ b/src-pdfrs/pdf/src/object/stream.rs @@ -0,0 +1,391 @@ +use datasize::DataSize; + +use crate as pdf; +use crate::object::*; +use crate::primitive::*; +use crate::error::*; +use crate::parser::Lexer; +use crate::enc::{StreamFilter, decode}; + +use std::ops::{Deref, Range}; +use std::fmt; + +#[derive(Clone)] +pub (crate) enum StreamData { + Generated(Arc<[u8]>), + Original(Range, PlainRef), +} +datasize::non_dynamic_const_heap_size!(StreamData, std::mem::size_of::()); + +/// Simple Stream object with only some additional entries from the stream dict (I). +#[derive(Clone, DataSize)] +pub struct Stream { + pub info: StreamInfo, + pub (crate) inner_data: StreamData, +} +impl Stream { + pub fn from_stream(s: PdfStream, resolve: &impl Resolve) -> Result { + let PdfStream {info, inner} = s; + let info = StreamInfo::::from_primitive(Primitive::Dictionary (info), resolve)?; + let inner_data = match inner { + StreamInner::InFile { id, file_range } => StreamData::Original(file_range, id), + StreamInner::Pending { data } => StreamData::Generated(data) + }; + Ok(Stream { info, inner_data }) + } + + /// the data is not compressed. the specified filters are to be applied when compressing the data + pub fn new_with_filters(i: I, data: impl Into>, filters: Vec) -> Stream { + Stream { + info: StreamInfo { + filters, + file: None, + file_filters: Vec::new(), + info: i + }, + inner_data: StreamData::Generated(data.into()), + } + } + pub fn new(i: I, data: impl Into>) -> Stream { + Stream { + info: StreamInfo { + filters: Vec::new(), + file: None, + file_filters: Vec::new(), + info: i + }, + inner_data: StreamData::Generated(data.into()), + } + } + /// the data is already compressed with the specified filters + pub fn from_compressed(i: I, data: impl Into>, filters: Vec) -> Stream { + Stream { + info: StreamInfo { + filters: filters.clone(), + file: None, + file_filters: Vec::new(), + info: i + }, + inner_data: StreamData::Generated(data.into()), + } + } + + pub fn data(&self, resolve: &impl Resolve) -> Result> { + match self.inner_data { + StreamData::Generated(ref data) => { + let filters = &self.info.filters; + if filters.len() == 0 { + Ok(data.clone()) + } else { + use std::borrow::Cow; + let mut data: Cow<[u8]> = (&**data).into(); + for filter in filters { + data = t!(decode(&data, filter), filter).into(); + } + Ok(data.into()) + } + } + StreamData::Original(ref file_range, id) => { + resolve.get_data_or_decode(id, file_range.clone(), &self.info.filters) + } + } + } + + pub fn len(&self) -> usize { + match self.inner_data { + StreamData::Generated(ref data) => data.len(), + StreamData::Original(ref range, _) => range.len() + } + } +} + +impl fmt::Debug for Stream { + fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { + write!(f, "Stream info={:?}, len={}", self.info.info, self.len()) + } +} + +impl Object for Stream { + /// Convert primitive to Self + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + 
let s = PdfStream::from_primitive(p, resolve)?; + Stream::from_stream(s, resolve) + } +} +impl Stream { + pub fn to_pdf_stream(&self, update: &mut impl Updater) -> Result { + let mut info = match self.info.info.to_primitive(update)? { + Primitive::Dictionary(dict) => dict, + Primitive::Null => Dictionary::new(), + p => bail!("stream info has to be a dictionary (found {:?})", p) + }; + let mut params = None; + if self.info.filters.len() > 0 { + for f in self.info.filters.iter() { + if let Some(para) = match f { + StreamFilter::LZWDecode(ref p) => Some(p.to_primitive(update)?), + StreamFilter::FlateDecode(ref p) => Some(p.to_primitive(update)?), + StreamFilter::DCTDecode(ref p) => Some(p.to_primitive(update)?), + StreamFilter::CCITTFaxDecode(ref p) => Some(p.to_primitive(update)?), + StreamFilter::JBIG2Decode(ref p) => Some(p.to_primitive(update)?), + _ => None + } { + assert!(params.is_none()); + params = Some(para); + } + } + let mut filters = self.info.filters.iter().map(|filter| match filter { + StreamFilter::ASCIIHexDecode => "ASCIIHexDecode", + StreamFilter::ASCII85Decode => "ASCII85Decode", + StreamFilter::LZWDecode(ref _p) => "LZWDecode", + StreamFilter::FlateDecode(ref _p) => "FlateDecode", + StreamFilter::JPXDecode => "JPXDecode", + StreamFilter::DCTDecode(ref _p) => "DCTDecode", + StreamFilter::CCITTFaxDecode(ref _p) => "CCITTFaxDecode", + StreamFilter::JBIG2Decode(ref _p) => "JBIG2Decode", + StreamFilter::Crypt => "Crypt", + StreamFilter::RunLengthDecode => "RunLengthDecode", + }) + .map(|s| Primitive::Name(s.into())); + match self.info.filters.len() { + 0 => {}, + 1 => { + info.insert("Filter", filters.next().unwrap().to_primitive(update)?); + } + _ => { + info.insert("Filter", Primitive::array::(filters, update)?); + } + } + } + if let Some(para) = params { + info.insert("DecodeParms", para); + } + + let inner = match self.inner_data { + StreamData::Generated(ref data) => { + info.insert("Length", Primitive::Integer(data.len() as _)); + StreamInner::Pending { data: data.clone() } + }, + StreamData::Original(ref file_range, id) => { + info.insert("Length", Primitive::Integer(file_range.len() as _)); + StreamInner::InFile { id, file_range: file_range.clone() } + } + }; + + Ok(PdfStream { info, inner }) + } +} +impl ObjectWrite for Stream { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.to_pdf_stream(update).map(Primitive::Stream) + } +} +impl DeepClone for Stream { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + let data = match self.inner_data { + StreamData::Generated(ref data) => data.clone(), + StreamData::Original(ref range, id) => cloner.stream_data(id, range.clone())? + }; + Ok(Stream { + info: self.info.deep_clone(cloner)?, + inner_data: StreamData::Generated(data), + }) + } +} +impl Deref for Stream { + type Target = StreamInfo; + fn deref(&self) -> &StreamInfo { + &self.info + } +} + + +/// General stream type. `I` is the additional information to be read from the stream dict. +#[derive(Debug, Clone, DataSize, DeepClone)] +pub struct StreamInfo { + // General dictionary entries + /// Filters that the `data` is currently encoded with (corresponds to both `/Filter` and + /// `/DecodeParms` in the PDF specs), constructed in `from_primitive()`. + pub filters: Vec, + + /// Eventual file containing the stream contentst + pub file: Option, + /// Filters to apply to external file specified in `file`. + pub file_filters: Vec, + + // TODO: + /* + /// Filters to apply to external file specified in `file`. 
+ #[pdf(key="FFilter")] + file_filters: Vec, + #[pdf(key="FDecodeParms")] + file_decode_parms: Vec, + /// Number of bytes in the decoded stream + #[pdf(key="DL")] + dl: Option, + */ + // Specialized dictionary entries + pub info: I, +} + +impl Deref for StreamInfo { + type Target = I; + fn deref(&self) -> &I { + &self.info + } +} + +impl Default for StreamInfo { + fn default() -> StreamInfo { + StreamInfo { + filters: Vec::new(), + file: None, + file_filters: Vec::new(), + info: I::default(), + } + } +} +impl StreamInfo { +/* + /// If the stream is not encoded, this is a no-op. `decode()` should be called whenever it's uncertain + /// whether the stream is encoded. + pub fn encode(&mut self, _filter: StreamFilter) { + // TODO this should add the filter to `self.filters` and encode the data with the given + // filter + unimplemented!(); + }*/ + pub fn get_filters(&self) -> &[StreamFilter] { + &self.filters + } +} +impl Object for StreamInfo { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut dict = Dictionary::from_primitive(p, resolve)?; + + let _length = usize::from_primitive( + dict.remove("Length").ok_or(PdfError::MissingEntry{ typ: "StreamInfo", field: "Length".into() })?, + resolve)?; + + let filters = Vec::::from_primitive( + dict.remove("Filter").unwrap_or(Primitive::Null), + resolve)?; + + let decode_params = Vec::>::from_primitive( + dict.remove("DecodeParms").unwrap_or(Primitive::Null), + resolve)?; + + let file = Option::::from_primitive( + dict.remove("F").unwrap_or(Primitive::Null), + resolve)?; + + let file_filters = Vec::::from_primitive( + dict.remove("FFilter").unwrap_or(Primitive::Null), + resolve)?; + + let file_decode_params = Vec::::from_primitive( + dict.remove("FDecodeParms").unwrap_or(Primitive::Null), + resolve)?; + + + let mut new_filters = Vec::new(); + let mut new_file_filters = Vec::new(); + + for (i, filter) in filters.iter().enumerate() { + let params = match decode_params.get(i) { + Some(Some(params)) => params.clone(), + _ => Dictionary::default(), + }; + new_filters.push(StreamFilter::from_kind_and_params(filter, params, resolve)?); + } + for (i, filter) in file_filters.iter().enumerate() { + let params = match file_decode_params.get(i) { + Some(params) => params.clone(), + None => Dictionary::default(), + }; + new_file_filters.push(StreamFilter::from_kind_and_params(filter, params, resolve)?); + } + + Ok(StreamInfo { + // General + filters: new_filters, + file, + file_filters: new_file_filters, + // Special + info: T::from_primitive(Primitive::Dictionary (dict), resolve)?, + }) + } +} + +#[derive(Object, Default, Debug, DataSize)] +#[pdf(Type = "ObjStm")] +pub struct ObjStmInfo { + #[pdf(key = "N")] + /// Number of compressed objects in the stream. + pub num_objects: usize, + + #[pdf(key = "First")] + /// The byte offset in the decoded stream, of the first compressed object. + pub first: usize, + + #[pdf(key = "Extends")] + /// A reference to an eventual ObjectStream which this ObjectStream extends. + pub extends: Option>>, +} + +#[derive(DataSize)] +pub struct ObjectStream { + /// Byte offset of each object. Index is the object number. + offsets: Vec, + /// The object number of this object. 
+ _id: ObjNr, + + inner: Stream +} + +impl Object for ObjectStream { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let stream: Stream = Stream::from_primitive(p, resolve)?; + + let mut offsets = Vec::new(); + { + debug!("parsing stream"); + let data = stream.data(resolve)?; + let mut lexer = Lexer::new(&data); + for _ in 0..(stream.info.num_objects as ObjNr) { + let _obj_nr = lexer.next()?.to::()?; + let offset = lexer.next()?.to::()?; + offsets.push(offset); + } + } + + Ok(ObjectStream { + offsets, + _id: 0, // TODO + inner: stream + }) + } +} + +impl ObjectStream { + pub fn get_object_slice(&self, index: usize, resolve: &impl Resolve) -> Result<(Arc<[u8]>, Range)> { + if index >= self.offsets.len() { + err!(PdfError::ObjStmOutOfBounds {index, max: self.offsets.len()}); + } + let start = self.inner.info.first + self.offsets[index]; + let data = self.inner.data(resolve)?; + let end = if index == self.offsets.len() - 1 { + data.len() + } else { + self.inner.info.first + self.offsets[index + 1] + }; + + Ok((data, start..end)) + } + /// Returns the number of contained objects + pub fn n_objects(&self) -> usize { + self.offsets.len() + } + pub fn _data(&self, resolve: &impl Resolve) -> Result> { + self.inner.data(resolve) + } +} diff --git a/src-pdfrs/pdf/src/object/types.rs b/src-pdfrs/pdf/src/object/types.rs new file mode 100644 index 0000000..2413086 --- /dev/null +++ b/src-pdfrs/pdf/src/object/types.rs @@ -0,0 +1,1844 @@ +//! Models of PDF types + +use std::collections::{HashMap, VecDeque}; +use datasize::DataSize; + +use crate as pdf; +use crate::content::deep_clone_op; +use crate::object::*; +use crate::error::*; +use crate::content::{Content, FormXObject, Matrix, parse_ops, serialize_ops, Op}; +use crate::font::Font; +use crate::enc::StreamFilter; + +/// Node in a page tree - type is either `Page` or `PageTree` +#[derive(Debug, Clone, DataSize)] +pub enum PagesNode { + Tree(PageTree), + Leaf(Page), +} + +impl Object for PagesNode { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut dict = p.resolve(resolve)?.into_dictionary()?; + match dict.require("PagesNode", "Type")?.as_name()? 
{ + "Page" => Ok(PagesNode::Leaf(t!(Page::from_dict(dict, resolve)))), + "Pages" => Ok(PagesNode::Tree(t!(PageTree::from_dict(dict, resolve)))), + other => Err(PdfError::WrongDictionaryType {expected: "Page or Pages".into(), found: other.into()}), + } + } +} +impl ObjectWrite for PagesNode { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match *self { + PagesNode::Tree(ref t) => t.to_primitive(update), + PagesNode::Leaf(ref l) => l.to_primitive(update), + } + } +} + +/* +use std::iter::once; +use itertools::Either; +// needs recursive types +impl PagesNode { + pub fn pages<'a>(&'a self, resolve: &'a impl Resolve) -> impl Iterator> + 'a { + match self { + PagesNode::Tree(ref tree) => Either::Left(Box::new(tree.pages(resolve))), + PagesNode::Leaf(ref page) => Either::Right(once(Ok(PageRc(page.clone())))) + } + } +} +*/ + +/// A `PagesNode::Leaf` wrapped in a `RcRef` +/// +#[derive(Debug, Clone, DataSize)] +pub struct PageRc(RcRef); +impl Deref for PageRc { + type Target = Page; + fn deref(&self) -> &Page { + match *self.0 { + PagesNode::Leaf(ref page) => page, + _ => unreachable!() + } + } +} +impl PageRc { + pub fn create(page: Page, update: &mut impl Updater) -> Result { + Ok(PageRc(update.create(PagesNode::Leaf(page))?)) + } + pub fn get_ref(&self) -> Ref { + self.0.get_ref() + } +} +impl Object for PageRc { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let node = t!(RcRef::from_primitive(p, resolve)); + match *node { + PagesNode::Tree(_) => Err(PdfError::WrongDictionaryType {expected: "Page".into(), found: "Pages".into()}), + PagesNode::Leaf(_) => Ok(PageRc(node)) + } + } +} +impl ObjectWrite for PageRc { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.0.to_primitive(update) + } +} + +/// A `PagesNode::Tree` wrapped in a `RcRef` +/// +#[derive(Debug, Clone, DataSize)] +pub struct PagesRc(RcRef); +impl Deref for PagesRc { + type Target = PageTree; + fn deref(&self) -> &PageTree { + match *self.0 { + PagesNode::Tree(ref tree) => tree, + _ => unreachable!() + } + } +} +impl PagesRc { + pub fn create(tree: PageTree, update: &mut impl Updater) -> Result { + Ok(PagesRc(update.create(PagesNode::Tree(tree))?)) + } +} +impl Object for PagesRc { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let node = t!(RcRef::from_primitive(p, resolve)); + match *node { + PagesNode::Leaf(_) => Err(PdfError::WrongDictionaryType {expected: "Pages".into(), found: "Page".into()}), + PagesNode::Tree(_) => Ok(PagesRc(node)) + } + } +} +impl ObjectWrite for PagesRc { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.0.to_primitive(update) + } +} + +#[derive(Object, ObjectWrite, Debug, DataSize)] +#[pdf(Type = "Catalog?")] +pub struct Catalog { + #[pdf(key="Version")] + pub version: Option, + + #[pdf(key="Pages")] + pub pages: PagesRc, + + #[pdf(key="PageLabels")] + pub page_labels: Option>, + + #[pdf(key="Names")] + pub names: Option>, + + #[pdf(key="Dests")] + pub dests: Option>, + +// ViewerPreferences: dict +// PageLayout: name +// PageMode: name + + #[pdf(key="Outlines")] + pub outlines: Option, +// Threads: array +// OpenAction: array or dict +// AA: dict +// URI: dict +// AcroForm: dict + #[pdf(key="AcroForm")] + pub forms: Option, + +// Metadata: stream + #[pdf(key="Metadata")] + pub metadata: Option>>, + + #[pdf(key="StructTreeRoot")] + pub struct_tree_root: Option, + +// MarkInfo: dict +// Lang: text string +// SpiderInfo: dict +// OutputIntents: array +// PieceInfo: dict +// OCProperties: dict 
+// Perms: dict +// Legal: dict +// Requirements: array +// Collection: dict +// NeedsRendering: bool +} + +#[derive(Object, ObjectWrite, Debug, Default, Clone, DataSize)] +#[pdf(Type = "Pages?")] +pub struct PageTree { + #[pdf(key="Parent")] + pub parent: Option, + + #[pdf(key="Kids")] + pub kids: Vec>, + + #[pdf(key="Count")] + pub count: u32, + + #[pdf(key="Resources")] + pub resources: Option>, + + #[pdf(key="MediaBox")] + pub media_box: Option, + + #[pdf(key="CropBox")] + pub crop_box: Option, +} +impl PageTree { + pub fn page(&self, resolve: &impl Resolve, page_nr: u32) -> Result { + self.page_limited(resolve, page_nr, 16) + } + fn page_limited(&self, resolve: &impl Resolve, page_nr: u32, depth: usize) -> Result { + if depth == 0 { + bail!("page tree depth exeeded"); + } + let mut pos = 0; + for &kid in &self.kids { + let node = resolve.get(kid)?; + match *node { + PagesNode::Tree(ref tree) => { + if (pos .. pos + tree.count).contains(&page_nr) { + return tree.page_limited(resolve, page_nr - pos, depth - 1); + } + pos += tree.count; + } + PagesNode::Leaf(ref _page) => { + if pos == page_nr { + return Ok(PageRc(node)); + } + pos += 1; + } + } + } + Err(PdfError::PageOutOfBounds {page_nr, max: pos}) + } + + /* + pub fn update_pages(&mut self, mut offset: u32, page_nr: u32, page: Page) -> Result<()> { + for kid in &self.kids { + // println!("{}/{} {:?}", offset, page_nr, kid); + match *(self.get(*kid)?) { + PagesNode::Tree(ref mut t) => { + if offset + t.count < page_nr { + offset += t.count; + } else { + return self.update_pages(t, offset, page_nr, page); + } + }, + PagesNode::Leaf(ref mut p) => { + if offset < page_nr { + offset += 1; + } else { + assert_eq!(offset, page_nr); + let p = self.storage.create(page)?; + self.storage.update(kid.get_inner(), PagesNode::Leaf(p)); + return Ok(()); + } + } + } + + } + Err(PdfError::PageNotFound {page_nr: page_nr}) + } + pub fn pages<'a>(&'a self, resolve: &'a impl Resolve) -> impl Iterator> + 'a { + self.kids.iter().flat_map(move |&r| { + match resolve.get(r) { + Ok(node) => Either::Left(node.pages(resolve)), + Err(e) => Either::Right(once(Err(e))) + } + }) + } + */ +} +impl SubType for PageTree {} + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize)] +#[pdf(Type="Page?")] +pub struct Page { + #[pdf(key="Parent")] + pub parent: PagesRc, + + #[pdf(key="Resources", indirect)] + pub resources: Option>, + + #[pdf(key="MediaBox")] + pub media_box: Option, + + #[pdf(key="CropBox")] + pub crop_box: Option, + + #[pdf(key="TrimBox")] + pub trim_box: Option, + + #[pdf(key="Contents")] + pub contents: Option, + + #[pdf(key="Rotate", default="0")] + pub rotate: i32, + + #[pdf(key="Metadata")] + pub metadata: Option, + + #[pdf(key="LGIDict")] + pub lgi: Option, + + #[pdf(key="VP")] + pub vp: Option, + + #[pdf(key="Annots")] + pub annotations: Lazy>>, + + #[pdf(other)] + pub other: Dictionary, +} +fn inherit<'a, T: 'a, F>(mut parent: &'a PageTree, f: F) -> Result> + where F: Fn(&'a PageTree) -> Option +{ + loop { + match (&parent.parent, f(parent)) { + (_, Some(t)) => return Ok(Some(t)), + (Some(ref p), None) => parent = p, + (None, None) => return Ok(None) + } + } +} + +impl Page { + pub fn new(parent: PagesRc) -> Page { + Page { + parent, + media_box: None, + crop_box: None, + trim_box: None, + resources: None, + contents: None, + rotate: 0, + metadata: None, + lgi: None, + vp: None, + other: Dictionary::new(), + annotations: Default::default(), + } + } + pub fn media_box(&self) -> Result { + match self.media_box { + Some(b) => Ok(b), + None => 
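// MediaBox, CropBox and Resources are inheritable page attributes in PDF: when the entry is
// missing on the Page dictionary itself, the fallback below walks up the Parent chain via
// `inherit` until some ancestor PageTree node supplies a value.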
inherit(&self.parent, |pt| pt.media_box)? + .ok_or_else(|| PdfError::MissingEntry { typ: "Page", field: "MediaBox".into() }) + } + } + pub fn crop_box(&self) -> Result { + match self.crop_box { + Some(b) => Ok(b), + None => match inherit(&self.parent, |pt| pt.crop_box)? { + Some(b) => Ok(b), + None => self.media_box() + } + } + } + pub fn resources(&self) -> Result<&MaybeRef> { + match self.resources { + Some(ref r) => Ok(r), + None => inherit(&self.parent, |pt| pt.resources.as_ref())? + .ok_or_else(|| PdfError::MissingEntry { typ: "Page", field: "Resources".into() }) + } + } +} +impl SubType for Page {} + + +#[derive(Object, DataSize, Debug, ObjectWrite)] +pub struct PageLabel { + #[pdf(key="S")] + pub style: Option, + + #[pdf(key="P")] + pub prefix: Option, + + #[pdf(key="St")] + pub start: Option +} + +#[derive(Object, ObjectWrite, Debug, DataSize, Default, DeepClone, Clone)] +pub struct Resources { + #[pdf(key="ExtGState")] + pub graphics_states: HashMap, + + #[pdf(key="ColorSpace")] + pub color_spaces: HashMap, + + #[pdf(key="Pattern")] + pub pattern: HashMap>, + + // shading: Option, + #[pdf(key="XObject")] + pub xobjects: HashMap>, + // /XObject is a dictionary that map arbitrary names to XObjects + #[pdf(key="Font")] + pub fonts: HashMap>, + + #[pdf(key="Properties")] + pub properties: HashMap>, +} + + +#[derive(Debug, Object, ObjectWrite, DataSize, Clone, DeepClone)] +pub struct PatternDict { + #[pdf(key="PaintType")] + pub paint_type: Option, + + #[pdf(key="TilingType")] + pub tiling_type: Option, + + #[pdf(key="BBox")] + pub bbox: Rectangle, + + #[pdf(key="XStep")] + pub x_step: f32, + + #[pdf(key="YStep")] + pub y_step: f32, + + #[pdf(key="Resources")] + pub resources: Ref, + + #[pdf(key="Matrix")] + pub matrix: Option, +} + +#[derive(Debug, DataSize)] +pub enum Pattern { + Dict(PatternDict), + Stream(PatternDict, Vec), +} +impl Pattern { + pub fn dict(&self) -> &PatternDict { + match *self { + Pattern::Dict(ref d) => d, + Pattern::Stream(ref d, _) => d, + } + } +} +impl Object for Pattern { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let p = p.resolve(resolve)?; + match p { + Primitive::Dictionary(dict) => Ok(Pattern::Dict(PatternDict::from_dict(dict, resolve)?)), + Primitive::Stream(s) => { + let stream: Stream = Stream::from_stream(s, resolve)?; + let data = stream.data(resolve)?; + let ops = t!(parse_ops(&data, resolve)); + let dict = stream.info.info; + Ok(Pattern::Stream(dict, ops)) + } + p => Err(PdfError::UnexpectedPrimitive { expected: "Dictionary or Stream", found: p.get_debug_name() }) + } + } +} +impl ObjectWrite for Pattern { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + Pattern::Dict(ref d) => d.to_primitive(update), + Pattern::Stream(ref d, ref ops) => { + let data = serialize_ops(ops)?; + let stream = Stream::new_with_filters(d.clone(), data, vec![]); + stream.to_primitive(update) + } + } + } +} +impl DeepClone for Pattern { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + match *self { + Pattern::Dict(ref d) => Ok(Pattern::Dict(d.deep_clone(cloner)?)), + Pattern::Stream(ref dict, ref ops) => { + let old_resources = cloner.get(dict.resources)?; + let mut resources = Resources::default(); + let ops: Vec = ops.iter().map(|op| deep_clone_op(op, cloner, &old_resources, &mut resources)).collect::>>()?; + let dict = PatternDict { + resources: cloner.create(resources)?.get_ref(), + .. 
*dict + }; + Ok(Pattern::Stream(dict, ops)) + } + } + } +} + +#[derive(Object, ObjectWrite, DeepClone, Debug, DataSize, Copy, Clone)] +pub enum LineCap { + Butt = 0, + Round = 1, + Square = 2 +} +#[derive(Object, ObjectWrite, DeepClone, Debug, DataSize, Copy, Clone)] +pub enum LineJoin { + Miter = 0, + Round = 1, + Bevel = 2 +} + +#[derive(Object, ObjectWrite, DeepClone, Debug, DataSize, Clone)] +#[pdf(Type = "ExtGState?")] +/// `ExtGState` +pub struct GraphicsStateParameters { + #[pdf(key="LW")] + pub line_width: Option, + + #[pdf(key="LC")] + pub line_cap: Option, + + #[pdf(key="LJ")] + pub line_join: Option, + + #[pdf(key="ML")] + pub miter_limit: Option, + + #[pdf(key="D")] + pub dash_pattern: Option>, + + #[pdf(key="RI")] + pub rendering_intent: Option, + + #[pdf(key="OP")] + pub overprint: Option, + + #[pdf(key="op")] + pub overprint_fill: Option, + + #[pdf(key="OPM")] + pub overprint_mode: Option, + + #[pdf(key="Font")] + pub font: Option<(Ref, f32)>, + + // BG + // BG2 + // UCR + // UCR2 + // TR + // TR2 + // HT + // FL + // SM + // SA + + #[pdf(key="BM")] + pub blend_mode: Option, + + #[pdf(key="SMask")] + pub smask: Option, + + + #[pdf(key="CA")] + pub stroke_alpha: Option, + + #[pdf(key="ca")] + pub fill_alpha: Option, + + #[pdf(key="AIS")] + pub alpha_is_shape: Option, + + #[pdf(key="TK")] + pub text_knockout: Option, + + #[pdf(other)] + _other: Dictionary +} + +#[derive(Object, Debug, DataSize, DeepClone)] +#[pdf(is_stream)] +pub enum XObject { + #[pdf(name="PS")] + Postscript (PostScriptXObject), + Image (ImageXObject), + Form (FormXObject), +} +impl ObjectWrite for XObject { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let (subtype, mut stream) = match self { + XObject::Postscript(s) => ("PS", s.to_pdf_stream(update)?), + XObject::Form(s) => ("Form", s.stream.to_pdf_stream(update)?), + XObject::Image(s) => ("Image", s.inner.to_pdf_stream(update)?), + }; + stream.info.insert("Subtype", Name::from(subtype)); + stream.info.insert("Type", Name::from("XObject")); + Ok(stream.into()) + } +} + +/// A variant of XObject +pub type PostScriptXObject = Stream; + +#[derive(Debug, DataSize, Clone, DeepClone)] +pub struct ImageXObject { + pub inner: Stream +} +impl Object for ImageXObject { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let s = PdfStream::from_primitive(p, resolve)?; + Self::from_stream(s, resolve) + } +} +impl ObjectWrite for ImageXObject { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + self.inner.to_primitive(update) + } +} +impl Deref for ImageXObject { + type Target = ImageDict; + fn deref(&self) -> &ImageDict { + &self.inner.info + } +} + +pub enum ImageFormat { + Raw, + Jpeg, + Jp2k, + Jbig2, + CittFax, + Png +} + +impl ImageXObject { + pub fn from_stream(s: PdfStream, resolve: &impl Resolve) -> Result { + let inner = Stream::from_stream(s, resolve)?; + Ok(ImageXObject { inner }) + } + + /// Decode everything except for the final image encoding (jpeg, jbig2, jp2k, ...) 
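// Two-stage decoding: everything up to the final image-codec filter in the chain is decoded
// right away, while that last filter (DCTDecode, CCITTFaxDecode, JPXDecode, FlateDecode or
// JBIG2Decode), if present, is handed back still encoded so that `image_data` below, or an
// external image decoder, can finish the job.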
+ pub fn raw_image_data(&self, resolve: &impl Resolve) -> Result<(Arc<[u8]>, Option<&StreamFilter>)> { + match self.inner.inner_data { + StreamData::Generated(_) => Ok((self.inner.data(resolve)?, None)), + StreamData::Original(ref file_range, id) => { + let filters = self.inner.filters.as_slice(); + // decode all non image filters + let end = filters.iter().rposition(|f| match f { + StreamFilter::ASCIIHexDecode => false, + StreamFilter::ASCII85Decode => false, + StreamFilter::LZWDecode(_) => false, + StreamFilter::RunLengthDecode => false, + StreamFilter::Crypt => true, + _ => true + }).unwrap_or(filters.len()); + + let (normal_filters, image_filters) = filters.split_at(end); + let data = resolve.get_data_or_decode(id, file_range.clone(), normal_filters)?; + + match image_filters { + [] => Ok((data, None)), + [StreamFilter::DCTDecode(_)] | + [StreamFilter::CCITTFaxDecode(_)] | + [StreamFilter::JPXDecode] | + [StreamFilter::FlateDecode(_)] | + [StreamFilter::JBIG2Decode(_)] => Ok((data, Some(&image_filters[0]))), + _ => bail!("??? filters={:?}", image_filters) + } + } + } + } + + pub fn image_data(&self, resolve: &impl Resolve) -> Result> { + let (data, filter) = self.raw_image_data(resolve)?; + let filter = match filter { + Some(f) => f, + None => return Ok(data) + }; + let mut data = match filter { + StreamFilter::CCITTFaxDecode(ref params) => { + if self.inner.info.width != params.columns { + bail!("image width mismatch {} != {}", self.inner.info.width, params.columns); + } + let mut data = fax_decode(&data, params)?; + if params.rows == 0 { + // adjust size + data.truncate(self.inner.info.height as usize * self.inner.info.width as usize); + } + data + } + StreamFilter::DCTDecode(ref p) => dct_decode(&data, p)?, + StreamFilter::JPXDecode => jpx_decode(&data)?, + StreamFilter::JBIG2Decode(ref p) => { + let global_data = p.globals.as_ref().map(|s| s.data(resolve)).transpose()?; + jbig2_decode(&data, global_data.as_deref().unwrap_or_default())? + }, + StreamFilter::FlateDecode(ref p) => flate_decode(&data, p)?, + _ => unreachable!() + }; + if let Some(ref decode) = self.decode { + if decode == &[1.0, 0.0] && self.bits_per_component == Some(1) { + data.iter_mut().for_each(|b| *b = !*b); + } + } + Ok(data.into()) + } +} + +#[derive(Object, Debug, DataSize, DeepClone, ObjectWrite)] +#[pdf(Type="XObject", Subtype="PS")] +pub struct PostScriptDict { + // TODO + #[pdf(other)] + pub other: Dictionary +} + +#[derive(Object, Debug, Clone, DataSize, DeepClone, ObjectWrite, Default)] +#[pdf(Type="XObject?", Subtype="Image")] +/// A variant of XObject +pub struct ImageDict { + #[pdf(key="Width")] + pub width: u32, + #[pdf(key="Height")] + pub height: u32, + + #[pdf(key="ColorSpace")] + pub color_space: Option, + + #[pdf(key="BitsPerComponent")] + pub bits_per_component: Option, + // Note: only allowed values are 1, 2, 4, 8, 16. Enum? + + #[pdf(key="Intent")] + pub intent: Option, + // Note: default: "the current rendering intent in the graphics state" - I don't think this + // ought to have a default then + + #[pdf(key="ImageMask", default="false")] + pub image_mask: bool, + + // Mask: stream or array + #[pdf(key="Mask")] + pub mask: Option, + // + /// Describes how to map image samples into the range of values appropriate for the image’s color space. + /// If `image_mask`: either [0 1] or [1 0]. 
Else, the length must be twice the number of color + /// components required by `color_space` (key ColorSpace) + // (see Decode arrays page 344) + #[pdf(key="Decode")] + pub decode: Option>, + + #[pdf(key="Interpolate", default="false")] + pub interpolate: bool, + + // Alternates: Vec + + // SMask (soft mask): stream + // SMaskInData: i32 + ///The integer key of the image’s entry in the structural parent tree + #[pdf(key="StructParent")] + pub struct_parent: Option, + + #[pdf(key="ID")] + pub id: Option, + + #[pdf(key="SMask")] + pub smask: Option>>, + + // OPI: dict + // Metadata: stream + // OC: dict + + #[pdf(other)] + pub other: Dictionary +} + + +#[derive(Object, Debug, Copy, Clone, DataSize, DeepClone, ObjectWrite)] +pub enum RenderingIntent { + AbsoluteColorimetric, + RelativeColorimetric, + Saturation, + Perceptual, +} +impl RenderingIntent { + pub fn from_str(s: &str) -> Option { + match s { + "AbsoluteColorimetric" => Some(RenderingIntent::AbsoluteColorimetric), + "RelativeColorimetric" => Some(RenderingIntent::RelativeColorimetric), + "Perceptual" => Some(RenderingIntent::Perceptual), + "Saturation" => Some(RenderingIntent::Saturation), + _ => None + } + } + pub fn to_str(self) -> &'static str { + match self { + RenderingIntent::AbsoluteColorimetric => "AbsoluteColorimetric", + RenderingIntent::RelativeColorimetric => "RelativeColorimetric", + RenderingIntent::Perceptual => "Perceptual", + RenderingIntent::Saturation => "Saturation", + } + } +} + +#[derive(Object, Debug, DataSize, DeepClone, ObjectWrite, Clone, Default)] +#[pdf(Type="XObject?", Subtype="Form")] +pub struct FormDict { + #[pdf(key="FormType", default="1")] + pub form_type: i32, + + #[pdf(key="Name")] + pub name: Option, + + #[pdf(key="LastModified")] + pub last_modified: Option, + + #[pdf(key="BBox")] + pub bbox: Rectangle, + + #[pdf(key="Matrix")] + pub matrix: Option, + + #[pdf(key="Resources")] + pub resources: Option>, + + #[pdf(key="Group")] + pub group: Option, + + #[pdf(key="Ref")] + pub reference: Option, + + #[pdf(key="Metadata")] + pub metadata: Option>>, + + #[pdf(key="PieceInfo")] + pub piece_info: Option, + + #[pdf(key="StructParent")] + pub struct_parent: Option, + + #[pdf(key="StructParents")] + pub struct_parents: Option, + + #[pdf(key="OPI")] + pub opi: Option, + + #[pdf(other)] + pub other: Dictionary, +} + + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize)] +pub struct InteractiveFormDictionary { + #[pdf(key="Fields")] + pub fields: Vec>, + + #[pdf(key="NeedAppearances", default="false")] + pub need_appearences: bool, + + #[pdf(key="SigFlags", default="0")] + pub sig_flags: u32, + + #[pdf(key="CO")] + pub co: Option>>, + + #[pdf(key="DR")] + pub dr: Option>, + + #[pdf(key="DA")] + pub da: Option, + + #[pdf(key="Q")] + pub q: Option, + + #[pdf(key="XFA")] + pub xfa: Option, +} + +#[derive(Object, ObjectWrite, Debug, Copy, Clone, PartialEq, DataSize)] +pub enum FieldType { + #[pdf(name="Btn")] + Button, + #[pdf(name="Tx")] + Text, + #[pdf(name="Ch")] + Choice, + #[pdf(name="Sig")] + Signature, + #[pdf(name="SigRef")] + SignatureReference, +} + +#[derive(Object, ObjectWrite, Debug)] +#[pdf(Type="SV")] +pub struct SeedValueDictionary { + #[pdf(key="Ff", default="0")] + pub flags: u32, + #[pdf(key="Filter")] + pub filter: Option, + #[pdf(key="SubFilter")] + pub sub_filter: Option>, + #[pdf(key="V")] + pub value: Option, + #[pdf(key="DigestMethod")] + pub digest_method: Vec, + #[pdf(other)] + pub other: Dictionary +} + +#[derive(Object, ObjectWrite, Debug)] +#[pdf(Type="Sig?")] +pub struct 
SignatureDictionary { + #[pdf(key="Filter")] + pub filter: Name, + #[pdf(key="SubFilter")] + pub sub_filter: Name, + #[pdf(key="ByteRange")] + pub byte_range: Vec, + #[pdf(key="Contents")] + pub contents: PdfString, + #[pdf(key="Cert")] + pub cert: Vec, + #[pdf(key="Reference")] + pub reference: Option, + #[pdf(key="Name")] + pub name: Option, + #[pdf(key="M")] + pub m: Option, + #[pdf(key="Location")] + pub location: Option, + #[pdf(key="Reason")] + pub reason: Option, + #[pdf(key="ContactInfo")] + pub contact_info: Option, + #[pdf(key="V")] + pub v: i32, + #[pdf(key="R")] + pub r: i32, + #[pdf(key="Prop_Build")] + pub prop_build: Dictionary, + #[pdf(key="Prop_AuthTime")] + pub prop_auth_time: i32, + #[pdf(key="Prop_AuthType")] + pub prop_auth_type: Name, + #[pdf(other)] + pub other: Dictionary +} + +#[derive(Object, ObjectWrite, Debug)] +#[pdf(Type="SigRef?")] +pub struct SignatureReferenceDictionary { + #[pdf(key="TransformMethod")] + pub transform_method: Name, + #[pdf(key="TransformParams")] + pub transform_params: Option, + #[pdf(key="Data")] + pub data: Option, + #[pdf(key="DigestMethod")] + pub digest_method: Option, + #[pdf(other)] + pub other: Dictionary +} + + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize)] +#[pdf(Type="Annot?")] +pub struct Annot { + #[pdf(key="Subtype")] + pub subtype: Name, + + #[pdf(key="Rect")] + pub rect: Option, + + #[pdf(key="Contents")] + pub contents: Option, + + #[pdf(key="P")] + pub page: Option, + + #[pdf(key="NM")] + pub annotation_name: Option, + + #[pdf(key="M")] + pub date: Option, + + #[pdf(key="F", default="0")] + pub annot_flags: u32, + + #[pdf(key="AP")] + pub appearance_streams: Option>, + + #[pdf(key="AS")] + pub appearance_state: Option, + + #[pdf(key="Border")] + pub border: Option, + + #[pdf(key="C")] + pub color: Option, + + #[pdf(key="InkList")] + pub ink_list: Option, + + #[pdf(other)] + pub other: Dictionary, +} + +#[derive(Object, ObjectWrite, Debug, DataSize, Clone)] +pub struct FieldDictionary { + #[pdf(key="FT")] + pub typ: Option, + + #[pdf(key="Parent")] + pub parent: Option>, + + #[pdf(key="Kids")] + pub kids: Vec>, + + #[pdf(key="T")] + pub name: Option, + + #[pdf(key="TU")] + pub alt_name: Option, + + #[pdf(key="TM")] + pub mapping_name: Option, + + #[pdf(key="Ff", default="0")] + pub flags: u32, + + #[pdf(key="SigFlags", default="0")] + pub sig_flags: u32, + + #[pdf(key="V")] + pub value: Primitive, + + #[pdf(key="DV")] + pub default_value: Primitive, + + #[pdf(key="DR")] + pub default_resources: Option>, + + #[pdf(key="AA")] + pub actions: Option, + + #[pdf(key="Rect")] + pub rect: Option, + + #[pdf(key="MaxLen")] + pub max_len: Option, + + #[pdf(key="Subtype")] + pub subtype: Option, + + #[pdf(other)] + pub other: Dictionary +} + +#[derive(Object, ObjectWrite, Debug, DataSize, Clone, DeepClone)] +pub struct AppearanceStreams { + #[pdf(key="N")] + pub normal: Ref, + + #[pdf(key="R")] + pub rollover: Option>, + + #[pdf(key="D")] + pub down: Option>, +} + +#[derive(Clone, Debug, DeepClone)] +pub enum AppearanceStreamEntry { + Single(FormXObject), + Dict(HashMap) +} +impl Object for AppearanceStreamEntry { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p.resolve(resolve)? 
{ + p @ Primitive::Dictionary(_) => Object::from_primitive(p, resolve).map(AppearanceStreamEntry::Dict), + p @ Primitive::Stream(_) => Object::from_primitive(p, resolve).map(AppearanceStreamEntry::Single), + p => Err(PdfError::UnexpectedPrimitive {expected: "Dict or Stream", found: p.get_debug_name()}) + } + } +} +impl ObjectWrite for AppearanceStreamEntry { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + AppearanceStreamEntry::Dict(d) => d.to_primitive(update), + AppearanceStreamEntry::Single(s) => s.to_primitive(update), + } + } +} +impl DataSize for AppearanceStreamEntry { + const IS_DYNAMIC: bool = true; + const STATIC_HEAP_SIZE: usize = std::mem::size_of::(); + fn estimate_heap_size(&self) -> usize { + match self { + AppearanceStreamEntry::Dict(d) => d.estimate_heap_size(), + AppearanceStreamEntry::Single(s) => s.estimate_heap_size() + } + } +} + +#[derive(Debug, DataSize, Clone, Object, ObjectWrite, DeepClone)] +pub enum Counter { + #[pdf(name="D")] + Arabic, + #[pdf(name="r")] + RomanUpper, + #[pdf(name="R")] + RomanLower, + #[pdf(name="a")] + AlphaUpper, + #[pdf(name="A")] + AlphaLower +} + +#[derive(Debug, DataSize)] +pub enum NameTreeNode { + /// + Intermediate (Vec>>), + /// + Leaf (Vec<(PdfString, T)>) + +} +/// Note: The PDF concept of 'root' node is an intermediate or leaf node which has no 'Limits' +/// entry. Hence, `limits`, +#[derive(Debug, DataSize)] +pub struct NameTree { + pub limits: Option<(PdfString, PdfString)>, + pub node: NameTreeNode, +} +impl NameTree { + pub fn walk(&self, r: &impl Resolve, callback: &mut dyn FnMut(&PdfString, &T)) -> Result<(), PdfError> { + match self.node { + NameTreeNode::Leaf(ref items) => { + for (name, val) in items { + callback(name, val); + } + } + NameTreeNode::Intermediate(ref items) => { + for &tree_ref in items { + let tree = r.get(tree_ref)?; + tree.walk(r, callback)?; + } + } + } + Ok(()) + } +} + +impl Object for NameTree { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut dict = t!(p.resolve(resolve)?.into_dictionary()); + + let limits = match dict.remove("Limits") { + Some(limits) => { + let limits = limits.resolve(resolve)?.into_array()?; + if limits.len() != 2 { + bail!("Error reading NameTree: 'Limits' is not of length 2"); + } + let min = limits[0].clone().into_string()?; + let max = limits[1].clone().into_string()?; + + Some((min, max)) + } + None => None + }; + + let kids = dict.remove("Kids"); + let names = dict.remove("Names"); + // If no `kids`, try `names`. Else there is an error. 
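// A name tree node is either an intermediate node (a /Kids array of references to child
// nodes) or a leaf (a /Names array laid out as a flat [key1 value1 key2 value2 ...] list);
// /Limits records the smallest and largest key reachable below a non-root node. The match
// below mirrors that: Kids wins, then Names, and a node with neither is tolerated as empty.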
+ Ok(match (kids, names) { + (Some(kids), _) => { + let kids = t!(kids.resolve(resolve)?.into_array()?.iter().map(|kid| + Ref::>::from_primitive(kid.clone(), resolve) + ).collect::>>()); + NameTree { + limits, + node: NameTreeNode::Intermediate (kids) + } + } + (None, Some(names)) => { + let names = names.resolve(resolve)?.into_array()?; + let mut new_names = Vec::new(); + for pair in names.chunks_exact(2) { + let name = pair[0].clone().resolve(resolve)?.into_string()?; + let value = t!(T::from_primitive(pair[1].clone(), resolve)); + new_names.push((name, value)); + } + NameTree { + limits, + node: NameTreeNode::Leaf (new_names), + } + } + (None, None) => { + warn!("Neither Kids nor Names present in NameTree node."); + NameTree { + limits, + node: NameTreeNode::Intermediate(vec![]) + } + } + }) + } +} + +impl ObjectWrite for NameTree { + fn to_primitive(&self, _update: &mut impl Updater) -> Result { + todo!("impl ObjectWrite for NameTree") + } +} + +#[derive(DataSize, Debug)] +pub struct NumberTree { + pub limits: Option<(i32, i32)>, + pub node: NumberTreeNode, +} + +#[derive(DataSize, Debug)] +pub enum NumberTreeNode { + Leaf(Vec<(i32, T)>), + Intermediate(Vec>>), +} +impl Object for NumberTree { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut dict = p.resolve(resolve)?.into_dictionary()?; + + let limits = match dict.remove("Limits") { + Some(limits) => { + let limits = t!(limits.resolve(resolve)?.into_array()); + if limits.len() != 2 { + bail!("Error reading NameTree: 'Limits' is not of length 2"); + } + let min = t!(limits[0].as_integer()); + let max = t!(limits[1].as_integer()); + + Some((min, max)) + } + None => None + }; + + let kids = dict.remove("Kids"); + let nums = dict.remove("Nums"); + match (kids, nums) { + (Some(kids), _) => { + let kids = t!(kids.resolve(resolve)?.into_array()?.iter().map(|kid| + Ref::>::from_primitive(kid.clone(), resolve) + ).collect::>>()); + Ok(NumberTree { + limits, + node: NumberTreeNode::Intermediate (kids) + }) + } + (None, Some(nums)) => { + let list = nums.into_array()?; + let mut items = Vec::with_capacity(list.len() / 2); + for (key, item) in list.into_iter().tuples() { + let idx = t!(key.as_integer()); + let val = t!(T::from_primitive(item, resolve)); + items.push((idx, val)); + } + Ok(NumberTree { + limits, + node: NumberTreeNode::Leaf(items) + }) + } + (None, None) => { + warn!("Neither Kids nor Names present in NumberTree node."); + Ok(NumberTree { + limits, + node: NumberTreeNode::Intermediate(vec![]) + }) + } + } + } +} +impl ObjectWrite for NumberTree { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let mut dict = Dictionary::new(); + if let Some(limits) = self.limits { + dict.insert("Limits", vec![limits.0.into(), limits.1.into()]); + } + match self.node { + NumberTreeNode::Leaf(ref items) => { + let mut nums = Vec::with_capacity(items.len() * 2); + for &(idx, ref label) in items { + nums.push(idx.into()); + nums.push(label.to_primitive(update)?); + } + dict.insert("Nums", nums); + } + NumberTreeNode::Intermediate(ref kids) => { + dict.insert("Kids", kids.iter().map(|r| r.get_inner().into()).collect_vec()); + } + } + Ok(dict.into()) + } +} +impl NumberTree { + pub fn walk(&self, r: &impl Resolve, callback: &mut dyn FnMut(i32, &T)) -> Result<(), PdfError> { + match self.node { + NumberTreeNode::Leaf(ref items) => { + for &(idx, ref val) in items { + callback(idx, val); + } + } + NumberTreeNode::Intermediate(ref items) => { + for &tree_ref in items { + let tree = r.get(tree_ref)?; + 
tree.walk(r, callback)?; + } + } + } + Ok(()) + } +} + +#[derive(Object, ObjectWrite, Clone, DeepClone, Debug)] +pub struct LageLabel { + #[pdf(key="S")] + style: Option, + + #[pdf(key="P")] + prefix: Option, + + #[pdf(key="St")] + start: Option, +} + +#[derive(Debug, Clone, DataSize)] +pub enum DestView { + // left, top, zoom + XYZ { left: Option, top: Option, zoom: f32 }, + Fit, + FitH { top: f32 }, + FitV { left: f32 }, + FitR(Rectangle), + FitB, + FitBH { top: f32 } +} + +#[derive(Debug, Clone, DataSize)] +pub enum MaybeNamedDest { + Named(PdfString), + Direct(Dest), +} + +#[derive(Debug, Clone, DataSize)] +pub struct Dest { + pub page: Option>, + pub view: DestView +} +impl Object for Dest { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let p = match p { + Primitive::Reference(r) => resolve.resolve(r)?, + p => p + }; + let p = match p { + Primitive::Dictionary(mut dict) => dict.require("Dest", "D")?, + p => p + }; + let array = t!(p.as_array(), p); + Dest::from_array(array, resolve) + } +} +impl Dest { + fn from_array(array: &[Primitive], resolve: &impl Resolve) -> Result { + let page = Object::from_primitive(try_opt!(array.get(0)).clone(), resolve)?; + let kind = try_opt!(array.get(1)); + let view = match kind.as_name()? { + "XYZ" => DestView::XYZ { + left: match *try_opt!(array.get(2)) { + Primitive::Null => None, + Primitive::Integer(n) => Some(n as f32), + Primitive::Number(f) => Some(f), + ref p => return Err(PdfError::UnexpectedPrimitive { expected: "Number | Integer | Null", found: p.get_debug_name() }), + }, + top: match *try_opt!(array.get(3)) { + Primitive::Null => None, + Primitive::Integer(n) => Some(n as f32), + Primitive::Number(f) => Some(f), + ref p => return Err(PdfError::UnexpectedPrimitive { expected: "Number | Integer | Null", found: p.get_debug_name() }), + }, + zoom: match array.get(4) { + Some(Primitive::Null) => 0.0, + Some(&Primitive::Integer(n)) => n as f32, + Some(&Primitive::Number(f)) => f, + Some(p) => return Err(PdfError::UnexpectedPrimitive { expected: "Number | Integer | Null", found: p.get_debug_name() }), + None => 0.0, + }, + }, + "Fit" => DestView::Fit, + "FitH" => DestView::FitH { + top: try_opt!(array.get(2)).as_number()? + }, + "FitV" => DestView::FitV { + left: try_opt!(array.get(2)).as_number()? + }, + "FitR" => DestView::FitR(Rectangle { + left: try_opt!(array.get(2)).as_number()?, + bottom: try_opt!(array.get(3)).as_number()?, + right: try_opt!(array.get(4)).as_number()?, + top: try_opt!(array.get(5)).as_number()?, + }), + "FitB" => DestView::FitB, + "FitBH" => DestView::FitBH { + top: try_opt!(array.get(2)).as_number()? 
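// Explicit destinations are arrays of the form [page /XYZ left top zoom], [page /Fit],
// [page /FitH top], [page /FitV left], [page /FitR l b r t], [page /FitB] or
// [page /FitBH top]; this parser maps each variant onto `DestView`, turning a Null
// left/top into None and a missing or Null zoom into 0.0.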
+ }, + name => return Err(PdfError::UnknownVariant { id: "Dest", name: name.into() }) + }; + Ok(Dest { + page, + view + }) + } +} +impl Object for MaybeNamedDest { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let p = match p { + Primitive::Reference(r) => resolve.resolve(r)?, + p => p + }; + let p = match p { + Primitive::Dictionary(mut dict) => dict.require("Dest", "D")?, + Primitive::String(s) => return Ok(MaybeNamedDest::Named(s)), + p => p + }; + let array = t!(p.as_array(), p); + Dest::from_array(array, resolve).map(MaybeNamedDest::Direct) + } +} +impl ObjectWrite for MaybeNamedDest { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + MaybeNamedDest::Named(s) => Ok(Primitive::String(s.clone())), + MaybeNamedDest::Direct(d) => d.to_primitive(update) + } + } +} +impl ObjectWrite for Dest { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + let mut arr = vec![self.page.to_primitive(update)?]; + match self.view { + DestView::XYZ { left, top, zoom } => { + arr.push(Primitive::Name("XYZ".into())); + arr.push(left.to_primitive(update)?); + arr.push(top.to_primitive(update)?); + arr.push(Primitive::Number(zoom)); + } + DestView::Fit => { + arr.push(Primitive::Name("Fit".into())); + } + DestView::FitH { top } => { + arr.push(Primitive::Name("FitH".into())); + arr.push(Primitive::Number(top)); + } + DestView::FitV { left } => { + arr.push(Primitive::Name("FitV".into())); + arr.push(Primitive::Number(left)); + } + DestView::FitR(rect) => { + arr.push(Primitive::Name("FitR".into())); + arr.push(Primitive::Number(rect.left)); + arr.push(Primitive::Number(rect.bottom)); + arr.push(Primitive::Number(rect.right)); + arr.push(Primitive::Number(rect.top)); + } + DestView::FitB => { + arr.push(Primitive::Name("FitB".into())); + } + DestView::FitBH { top } => { + arr.push(Primitive::Name("FitBH".into())); + arr.push(Primitive::Number(top)); + } + } + Ok(Primitive::Array(arr)) + } +} + +/// There is one `NameDictionary` associated with each PDF file. +#[derive(Object, ObjectWrite, Debug, DataSize)] +pub struct NameDictionary { + #[pdf(key="Pages")] + pub pages: Option>, + + #[pdf(key="Dests")] + pub dests: Option>>, + + #[pdf(key="AP")] + pub ap: Option>, + + #[pdf(key="JavaScript")] + pub javascript: Option>, + + #[pdf(key="Templates")] + pub templates: Option>, + + #[pdf(key="IDS")] + pub ids: Option>, + + #[pdf(key="URLS")] + pub urls: Option>, + + #[pdf(key="EmbeddedFiles")] + pub embedded_files: Option>, + /* + #[pdf(key="AlternativePresentations")] + alternate_presentations: NameTree, + #[pdf(key="Renditions")] + renditions: NameTree, + */ +} + +/* Embedded file streams can be associated with the document as a whole through + * the EmbeddedFiles entry (PDF 1.4) in the PDF document’s name dictionary + * (see Section 3.6.3, “Name Dictionary”). + * The associated name tree maps name strings to file specifications that refer + * to embedded file streams through their EF entries. 
+*/ + +#[derive(Object, ObjectWrite, Debug, Clone, DataSize, DeepClone)] +pub struct FileSpec { + #[pdf(key="EF")] + pub ef: Option>>>, + /* + #[pdf(key="RF")] + rf: Option>, + */ +} + +/// Used only as elements in `FileSpec` +#[derive(Object, ObjectWrite, Debug, Clone, DeepClone)] +pub struct Files { + #[pdf(key="F")] + pub f: Option, + #[pdf(key="UF")] + pub uf: Option, + #[pdf(key="DOS")] + pub dos: Option, + #[pdf(key="Mac")] + pub mac: Option, + #[pdf(key="Unix")] + pub unix: Option, +} +impl DataSize for Files { + const IS_DYNAMIC: bool = T::IS_DYNAMIC; + const STATIC_HEAP_SIZE: usize = 5 * Option::::STATIC_HEAP_SIZE; + + fn estimate_heap_size(&self) -> usize { + self.f.as_ref().map(|t| t.estimate_heap_size()).unwrap_or(0) + + self.uf.as_ref().map(|t| t.estimate_heap_size()).unwrap_or(0) + + self.dos.as_ref().map(|t| t.estimate_heap_size()).unwrap_or(0) + + self.mac.as_ref().map(|t| t.estimate_heap_size()).unwrap_or(0) + + self.unix.as_ref().map(|t| t.estimate_heap_size()).unwrap_or(0) + } + +} + +/// PDF Embedded File Stream. +#[derive(Object, Debug, Clone, DataSize, DeepClone, ObjectWrite)] +pub struct EmbeddedFile { + #[pdf(key="Subtype")] + subtype: Option, + + #[pdf(key="Params")] + pub params: Option, +} + +#[derive(Object, Debug, Clone, DataSize, DeepClone, ObjectWrite)] +pub struct EmbeddedFileParamDict { + #[pdf(key="Size")] + pub size: Option, + + #[pdf(key="CreationDate")] + creationdate: Option, + + #[pdf(key="ModDate")] + moddate: Option, + + #[pdf(key="Mac")] + mac: Option, + + #[pdf(key="CheckSum")] + checksum: Option, +} + +#[derive(Object, Debug, Clone, DataSize)] +pub struct OutlineItem { + #[pdf(key="Title")] + pub title: Option, + + #[pdf(key="Prev")] + pub prev: Option>, + + #[pdf(key="Next")] + pub next: Option>, + + #[pdf(key="First")] + pub first: Option>, + + #[pdf(key="Last")] + pub last: Option>, + + #[pdf(key="Count", default="0")] + pub count: i32, + + #[pdf(key="Dest")] + pub dest: Option, + + #[pdf(key="A")] + pub action: Option, + + #[pdf(key="SE")] + pub se: Option, + + #[pdf(key="C")] + pub color: Option>, + + #[pdf(key="F")] + pub flags: Option, +} + +#[derive(Clone, Debug, DataSize)] +pub enum Action { + Goto(MaybeNamedDest), + Other(Dictionary) +} +impl Object for Action { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + let mut d = t!(p.resolve(resolve)?.into_dictionary()); + let s = try_opt!(d.get("S")).as_name()?; + match s { + "GoTo" => { + let dest = t!(MaybeNamedDest::from_primitive(try_opt!(d.remove("D")), resolve)); + Ok(Action::Goto(dest)) + } + _ => Ok(Action::Other(d)) + } + } +} +impl ObjectWrite for Action { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + match self { + Action::Goto(dest) => { + let mut dict = Dictionary::new(); + dict.insert("D", dest.to_primitive(update)?); + Ok(Primitive::Dictionary(dict)) + } + Action::Other(dict) => Ok(Primitive::Dictionary(dict.clone())) + } + } +} + +#[derive(Object, ObjectWrite, Debug, DataSize)] +#[pdf(Type="Outlines?")] +pub struct Outlines { + #[pdf(key="Count", default="0")] + pub count: i32, + + #[pdf(key="First")] + pub first: Option>, + + #[pdf(key="Last")] + pub last: Option>, + +} + +/// ISO 32000-2:2020(E) 7.9.5 Rectangles (Pg 134) +/// specifying the lower-left x, lower-left y, +/// upper-right x, and upper-right y coordinates +/// of the rectangle, in that order. The other two +/// corners of the rectangle are then assumed to +/// have coordinates (ll x , ur y ) and +/// (ur x , ll y ). 
+/// Also see Table 74, key BBox definition Pg 221 +/// defining top, left, bottom, right labeling +#[derive(Debug, Copy, Clone, DataSize, Default)] +pub struct Rectangle { + pub left: f32, + pub bottom: f32, + pub right: f32, + pub top: f32, +} +#[deprecated] +pub type Rect = Rectangle; + +impl Object for Rectangle { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + let arr = p.resolve(r)?.into_array()?; + if arr.len() != 4 { + bail!("len != 4 {:?}", arr); + } + Ok(Rectangle { + left: arr[0].as_number()?, + bottom: arr[1].as_number()?, + right: arr[2].as_number()?, + top: arr[3].as_number()? + }) + } +} +impl ObjectWrite for Rectangle { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + Primitive::array::([self.left, self.bottom, self.right, self.top].iter(), update) + } +} + + +// Stuff from chapter 10 of the PDF 1.7 ref + +#[derive(Object, ObjectWrite, Debug, DataSize)] +pub struct MarkInformation { // TODO no /Type + /// indicating whether the document conforms to Tagged PDF conventions + #[pdf(key="Marked", default="false")] + pub marked: bool, + /// Indicating the presence of structure elements that contain user properties attributes + #[pdf(key="UserProperties", default="false")] + pub user_properties: bool, + /// Indicating the presence of tag suspects + #[pdf(key="Suspects", default="false")] + pub suspects: bool, +} + +#[derive(Object, ObjectWrite, Debug, DataSize)] +#[pdf(Type = "StructTreeRoot")] +pub struct StructTreeRoot { + #[pdf(key="K")] + pub children: Vec, +} +#[derive(Object, ObjectWrite, Debug, DataSize)] +pub struct StructElem { + #[pdf(key="S")] + pub struct_type: StructType, + + #[pdf(key="P")] + pub parent: Ref, + + #[pdf(key="ID")] + pub id: Option, + + /// `Pg`: A page object representing a page on which some or all of the content items designated by the K entry are rendered. 
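// For reference, `Rectangle` above round-trips as the four-element array [llx lly urx ury];
// e.g. a US-Letter MediaBox serialises as [0 0 612 792], with the remaining two corners
// implied as (llx, ury) and (urx, lly).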
+ #[pdf(key="Pg")] + pub page: Option>, +} + +#[derive(Object, ObjectWrite, Debug, DataSize)] +pub enum StructType { + Document, + Part, + Art, + Sect, + Div, + BlockQuote, + Caption, + TOC, + TOCI, + Index, + NonStruct, + Private, + Book, + P, + H, + H1, + H2, + H3, + H4, + H5, + H6, + L, + Ll, + Lbl, + LBody, + Table, + TR, + TH, + TD, + THead, + TBody, + TFoot, + Span, + Quote, + Note, + Reference, + BibEntry, + Code, + Link, + Annot, + Ruby, + RB, + RT, + RP, + Warichu, + WT, + WP, + Figure, + Formula, + Form, + #[pdf(other)] + Other(String), +} + +#[derive(Object, ObjectWrite, Debug, DataSize)] +pub enum Trapped { + True, + False, + Unknown, +} + +#[derive(Object, ObjectWrite, Debug, DataSize, Default)] +pub struct InfoDict { + #[pdf(key="Title")] + pub title: Option, + + #[pdf(key="Author")] + pub author: Option, + + #[pdf(key="Subject")] + pub subject: Option, + + #[pdf(key="Keywords")] + pub keywords: Option, + + #[pdf(key="Creator")] + pub creator: Option, + + #[pdf(key="Producer")] + pub producer: Option, + + #[pdf(key="CreationDate")] + pub creation_date: Option, + + #[pdf(key="ModDate")] + pub mod_date: Option, + + #[pdf(key="Trapped")] + pub trapped: Option, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_struct_type() { + assert!(matches!( + StructType::from_primitive(Primitive::Name("BibEntry".into()), &NoResolve), + Ok(StructType::BibEntry) + )); + + let result = + StructType::from_primitive(Primitive::Name("CustomStructType".into()), &NoResolve); + if let Ok(StructType::Other(name)) = &result { + assert_eq!(name, "CustomStructType"); + } else { + panic!("Incorrect result of {:?}", &result); + } + } + + #[test] + fn test_field_type() { + assert_eq!( + FieldType::from_primitive(Primitive::Name("Tx".into()), &NoResolve).unwrap(), + FieldType::Text + ); + } +} diff --git a/src-pdfrs/pdf/src/parser/lexer/mod.rs b/src-pdfrs/pdf/src/parser/lexer/mod.rs new file mode 100644 index 0000000..34f37a4 --- /dev/null +++ b/src-pdfrs/pdf/src/parser/lexer/mod.rs @@ -0,0 +1,575 @@ +/// Lexing an input file, in the sense of breaking it up into substrings based on delimiters and +/// whitespace. + +use std::str::FromStr; +use std::ops::{Range, Deref, RangeFrom}; +use std::borrow::Cow; + +use crate::error::*; +use crate::primitive::Name; + +mod str; +pub use self::str::{StringLexer, HexStringLexer}; + + +/// `Lexer` has functionality to jump around and traverse the PDF lexemes of a string in any direction. +#[derive(Copy, Clone)] +#[allow(dead_code)] +pub struct Lexer<'a> { + pos: usize, + buf: &'a [u8], + file_offset: usize, +} + +// find the position where condition(data[pos-1]) == false and condition(data[pos]) == true +#[inline] +fn boundary_rev(data: &[u8], pos: usize, condition: impl Fn(u8) -> bool) -> usize { + match data[.. 
pos].iter().rposition(|&b| !condition(b)) { + Some(start) => start + 1, + None => 0 + } +} + +// find the position where condition(data[pos-1]) == true and condition(data[pos]) == false +#[inline] +fn boundary(data: &[u8], pos: usize, condition: impl Fn(u8) -> bool) -> usize { + match data[pos ..].iter().position(|&b| !condition(b)) { + Some(start) => pos + start, + None => data.len() + } +} + +#[inline] +fn is_whitespace(b: u8) -> bool { + matches!(b, 0 | b' ' | b'\r' | b'\n' | b'\t') +} +#[inline] +fn not(f: impl Fn(T) -> bool) -> impl Fn(T) -> bool { + move |t| !f(t) +} +impl<'a> Lexer<'a> { + pub fn new(buf: &'a [u8]) -> Lexer<'a> { + Lexer { + pos: 0, + buf, + file_offset: 0 + } + } + pub fn with_offset(buf: &'a [u8], file_offset: usize) -> Lexer<'a> { + Lexer { + pos: 0, + buf, + file_offset + } + } + + /// Returns next lexeme. Lexer moves to the next byte after the lexeme. (needs to be tested) + #[allow(clippy::should_implement_trait)] + pub fn next(&mut self) -> Result> { + let (lexeme, pos) = self.next_word()?; + self.pos = pos; + Ok(lexeme) + } + + /// consume the whitespace sequence following the stream start + pub fn next_stream(&mut self) -> Result<()> { + let pos = self.skip_whitespace(self.pos)?; + if !self.buf[pos ..].starts_with(b"stream") { + // bail!("next token isn't 'stream'"); + } + + let &b0 = self.buf.get(pos + 6).ok_or(PdfError::EOF)?; + if b0 == b'\n' { + self.pos = pos + 7; + } else if b0 == b'\r' { + let &b1 = self.buf.get(pos + 7).ok_or(PdfError::EOF)?; + if b1 != b'\n' { + bail!("invalid whitespace following 'stream'"); + // bail!("invalid whitespace following 'stream'"); + } + self.pos = pos + 8; + } else { + bail!("invalid whitespace"); + } + Ok(()) + } + /// Gives previous lexeme. Lexer moves to the first byte of this lexeme. (needs to be tested) + pub fn back(&mut self) -> Result> { + //println!("back: {:?}", String::from_utf8_lossy(&self.buf[self.pos.saturating_sub(20) .. self.pos])); + + // first reverse until we find non-whitespace + let end_pos = boundary_rev(self.buf, self.pos, is_whitespace); + let start_pos = boundary_rev(self.buf, end_pos, not(is_whitespace)); + self.pos = start_pos; + + Ok(self.new_substr(start_pos .. end_pos)) + } + + /// Look at the next lexeme. Will return empty substr if the next character is EOF. + pub fn peek(&self) -> Result> { + match self.next_word() { + Ok((substr, _)) => Ok(substr), + Err(PdfError::EOF) => Ok(self.new_substr(self.pos..self.pos)), + Err(e) => Err(e), + } + + } + + /// Returns `Ok` if the next lexeme matches `expected` - else `Err`. + pub fn next_expect(&mut self, expected: &'static str) -> Result<()> { + let word = self.next()?; + if word.equals(expected.as_bytes()) { + Ok(()) + } else { + Err(PdfError::UnexpectedLexeme { + pos: self.pos, + lexeme: word.to_string(), + expected + }) + } + } + + /// skip whitespaces and return the position of the first non-whitespace character + #[inline] + fn skip_whitespace(&self, pos: usize) -> Result { + // Move away from eventual whitespace + let pos = boundary(self.buf, pos, is_whitespace); + if pos >= self.buf.len() { + Err(PdfError::EOF) + } else { + Ok(pos) + } + } + + /// Used by next, peek and back - returns substring and new position + /// If forward, places pointer at the next non-whitespace character. + /// If backward, places pointer at the start of the current word. + // TODO ^ backward case is actually not tested or.. thought about that well. 
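// A minimal usage sketch (hypothetical input; only calls defined in this file):
//
//     let mut lexer = Lexer::new(b"12 0 obj");
//     let obj_nr = lexer.next()?.to::<u64>()?;   // 12
//     let gen_nr = lexer.next()?.to::<u16>()?;   // 0
//     lexer.next_expect("obj")?;
//
// `next()` skips whitespace and `%` comments, so the sequence works however the tokens
// are separated.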
+ fn next_word(&self) -> Result<(Substr<'a>, usize)> { + if self.pos == self.buf.len() { + return Err(PdfError::EOF); + } + let mut pos = self.skip_whitespace(self.pos)?; + while self.buf.get(pos) == Some(&b'%') { + pos += 1; + if let Some(off) = self.buf[pos..].iter().position(|&b| b == b'\n') { + pos += off+1; + } + + // Move away from eventual whitespace + pos = self.skip_whitespace(pos)?; + } + + let start_pos = pos; + + // If first character is delimiter, this lexeme only contains that character. + // - except << and >> which go together, and / which marks the start of a + // name token. + if self.is_delimiter(pos) { + if self.buf[pos] == b'/' { + pos = self.advance_pos(pos)?; + while !self.is_whitespace(pos) && !self.is_delimiter(pos) { + match self.advance_pos(pos) { + Ok(p) => pos = p, + Err(_) => break, + } + } + return Ok((self.new_substr(start_pos..pos), pos)); + } + + if let Some(slice) = self.buf.get(pos..=pos+1) { + if slice == b"<<" || slice == b">>" { + pos = self.advance_pos(pos)?; + } + } + + pos = self.advance_pos(pos)?; + return Ok((self.new_substr(start_pos..pos), pos)); + } + + // Read to past the end of lexeme + while !self.is_whitespace(pos) && !self.is_delimiter(pos) { + match self.advance_pos(pos) { + Ok(p) => pos = p, + Err(_) => break, + } + } + let result = self.new_substr(start_pos..pos); + + // Move away from whitespace again + //pos = self.skip_whitespace(pos)?; + Ok((result, pos)) + } + + /// Just a helper for next_word. + #[inline] + fn advance_pos(&self, pos: usize) -> Result { + if pos < self.buf.len() { + Ok(pos + 1) + } else { + Err(PdfError::EOF) + } + } + + #[inline] + pub fn next_as(&mut self) -> Result + where T: FromStr, T::Err: std::error::Error + Send + Sync + 'static + { + self.next().and_then(|word| word.to::()) + } + + #[inline] + pub fn get_pos(&self) -> usize { + self.pos + } + + #[inline] + pub fn new_substr(&self, mut range: Range) -> Substr<'a> { + // if the range is backward, fix it + // start is inclusive, end is exclusive. keep that in mind + if range.start > range.end { + let new_end = range.start + 1; + range.start = range.end + 1; + range.end = new_end; + } + + Substr { + file_offset: self.file_offset + range.start, + slice: &self.buf[range], + } + } + + /// Just a helper function for set_pos, set_pos_from_end and offset_pos. + #[inline] + pub fn set_pos(&mut self, wanted_pos: usize) -> Substr<'a> { + let new_pos = wanted_pos.min(self.buf.len()); + let range = if self.pos < new_pos { + self.pos..new_pos + } else { + new_pos..self.pos + }; + self.pos = new_pos; + self.new_substr(range) + } + + /// Returns the substr between the old and new positions + #[inline] + pub fn set_pos_from_end(&mut self, new_pos: usize) -> Substr<'a> { + self.set_pos(self.buf.len().saturating_sub(new_pos).saturating_sub(1)) + } + /// Returns the substr between the old and new positions + #[inline] + pub fn offset_pos(&mut self, offset: usize) -> Substr<'a> { + self.set_pos(self.pos.wrapping_add(offset)) + } + + /// Moves pos to start of next line. Returns the skipped-over substring. + #[allow(dead_code)] + pub fn seek_newline(&mut self) -> Substr{ + let start = self.pos; + while self.buf[self.pos] != b'\n' + && self.incr_pos() { } + self.incr_pos(); + + self.new_substr(start..self.pos) + } + + + // TODO: seek_substr and seek_substr_back should use next() or back()? + /// Moves pos to after the found `substr`. Returns Substr with traversed text if `substr` is found. 
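+    // Sketch (illustrative, not from the original comments): on the buffer
+    // b"foo endobj bar", seek_substr("endobj") leaves the position just past "endobj"
+    // and returns the skipped-over prefix "foo ".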
+ #[allow(dead_code)] + pub fn seek_substr(&mut self, substr: impl AsRef<[u8]>) -> Option> { + // + let substr = substr.as_ref(); + let start = self.pos; + let mut matched = 0; + loop { + if self.pos >= self.buf.len() { + return None + } + if self.buf[self.pos] == substr[matched] { + matched += 1; + } else { + matched = 0; + } + if matched == substr.len() { + break; + } + self.pos += 1; + } + self.pos += 1; + Some(self.new_substr(start..(self.pos - substr.len()))) + } + + //TODO perhaps seek_substr_back should, like back(), move to the first letter of the substr. + /// Searches for string backward. Moves to after the found `substr`, returns the traversed + /// Substr if found. + pub fn seek_substr_back(&mut self, substr: &[u8]) -> Result> { + let end = self.pos; + match self.buf[.. end].windows(substr.len()).rposition(|w| w == substr) { + Some(start) => { + self.pos = start + substr.len(); + Ok(self.new_substr(self.pos .. end)) + } + None => Err(PdfError::NotFound {word: String::from_utf8_lossy(substr).into() }) + } + } + + /// Read and return slice of at most n bytes. + #[allow(dead_code)] + pub fn read_n(&mut self, n: usize) -> Substr<'a> { + let start_pos = self.pos; + self.pos += n; + if self.pos >= self.buf.len() { + self.pos = self.buf.len() - 1; + } + if start_pos < self.buf.len() { + self.new_substr(start_pos..self.pos) + } else { + self.new_substr(0..0) + } + } + + /// Returns slice from current position to end. + #[inline] + pub fn get_remaining_slice(&self) -> &'a [u8] { + &self.buf[self.pos..] + } + + /// for debugging + pub fn ctx(&self) -> Cow { + String::from_utf8_lossy(&self.buf[self.pos.saturating_sub(40)..self.buf.len().min(self.pos+40)]) + } + + #[inline] + fn incr_pos(&mut self) -> bool { + if self.pos >= self.buf.len() - 1 { + false + } else { + self.pos += 1; + true + } + } + #[inline] + fn is_whitespace(&self, pos: usize) -> bool { + self.buf.get(pos).map(|&b| is_whitespace(b)).unwrap_or(false) + } + + #[inline] + fn is_delimiter(&self, pos: usize) -> bool { + self.buf.get(pos).map(|b| b"()<>[]{}/%".contains(b)).unwrap_or(false) + } + +} + + + +/// A slice from some original string - a lexeme. +#[derive(Copy, Clone, Debug)] +pub struct Substr<'a> { + slice: &'a [u8], + file_offset: usize, +} +impl<'a> Substr<'a> { + pub fn new + ?Sized>(data: &'a T, file_offset: usize) -> Self { + Substr { slice: data.as_ref(), file_offset } + } + // to: &S -> U. Possibly expensive conversion. + // as: &S -> &U. Cheap borrow conversion + // into: S -> U. Cheap ownership transfer conversion. 
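+    // Illustrative sketch: Substr::new("42", 0).to::<i32>() interprets the slice as
+    // UTF-8 and then parses it via FromStr, yielding Ok(42); a non-numeric slice
+    // surfaces as PdfError::Parse.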
+ + #[allow(clippy::inherent_to_string)] + pub fn to_string(&self) -> String { + String::from_utf8_lossy(self.as_slice()).into() + } + pub fn to_name(&self) -> Result { + Ok(Name(std::str::from_utf8(self.as_slice())?.into())) + } + pub fn to_vec(&self) -> Vec { + self.slice.to_vec() + } + pub fn to(&self) -> Result + where T: FromStr, T::Err: std::error::Error + Send + Sync + 'static + { + std::str::from_utf8(self.slice)?.parse::().map_err(|e| PdfError::Parse { source: e.into() }) + } + pub fn is_integer(&self) -> bool { + if self.slice.len() == 0 { + return false; + } + let mut slice = self.slice; + if slice[0] == b'-' { + if slice.len() < 2 { + return false; + } + slice = &slice[1..]; + } + is_int(slice) + } + pub fn is_real_number(&self) -> bool { + self.real_number().is_some() + } + pub fn real_number(&self) -> Option { + if self.slice.len() == 0 { + return None; + } + let mut slice = self.slice; + if slice[0] == b'-' { + if slice.len() < 2 { + return None; + } + slice = &slice[1..]; + } + if let Some(i) = slice.iter().position(|&b| b == b'.') { + if !is_int(&slice[..i]) { + return None; + } + slice = &slice[i+1..]; + } + if let Some(len) = slice.iter().position(|&b| !b.is_ascii_digit()) { + if len == 0 { + return None; + } + let end = self.slice.len() - slice.len() + len; + Some(Substr { + file_offset: self.file_offset, + slice: &self.slice[..end] + }) + } else { + Some(*self) + } + } + + pub fn as_slice(&self) -> &'a [u8] { + self.slice + } + pub fn as_str(&self) -> Result<&str> { + std::str::from_utf8(self.slice).map_err(|e| PdfError::Parse { source: e.into() }) + } + + pub fn equals(&self, other: impl AsRef<[u8]>) -> bool { + self.slice == other.as_ref() + } + + pub fn reslice(&self, range: RangeFrom) -> Substr<'a> { + Substr { + file_offset: self.file_offset + range.start, + slice: &self.slice[range], + } + } + + pub fn file_range(&self) -> Range { + self.file_offset .. 
self.file_offset + self.slice.len() + } +} + +#[inline] +fn is_int(b: &[u8]) -> bool { + b.iter().all(|&b| b.is_ascii_digit()) +} +impl<'a> Deref for Substr<'a> { + type Target = [u8]; + fn deref(&self) -> &[u8] { + self.as_slice() + } +} +impl<'a> PartialEq<&[u8]> for Substr<'a> { + fn eq(&self, rhs: &&[u8]) -> bool { + self.equals(rhs) + } +} + +impl<'a> PartialEq<&str> for Substr<'a> { + fn eq(&self, rhs: &&str) -> bool { + self.equals(rhs.as_bytes()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use std::fs::File; + use std::io::{BufWriter, Write}; + + #[test] + fn test_boundary_rev() { + assert_eq!(boundary_rev(b" hello", 3, not(is_whitespace)), 1); + assert_eq!(boundary_rev(b" hello", 3, is_whitespace), 3); + } + + #[test] + fn test_boundary() { + assert_eq!(boundary(b" hello ", 3, not(is_whitespace)), 6); + assert_eq!(boundary(b" hello ", 3, is_whitespace), 3); + assert_eq!(boundary(b"01234 7orld", 5, is_whitespace), 7); + assert_eq!(boundary(b"01234 7orld", 7, is_whitespace), 7); + assert_eq!(boundary(b"q\n", 1, is_whitespace), 2); + } + + #[test] + fn test_substr() { + assert!(Substr::new("123", 0).is_real_number()); + assert!(Substr::new("123.", 0).is_real_number()); + assert!(Substr::new("123.45", 0).is_real_number()); + assert!(Substr::new(".45", 0).is_real_number()); + assert!(Substr::new("-.45", 0).is_real_number()); + assert!(!Substr::new("123.45", 0).is_integer()); + assert!(Substr::new("123", 0).is_integer()); + } + + #[test] + fn test_lexed() { + let file_data = fs::read("/home/kschuettler/Dokumente/TestFiles/18 - EVIDIS - Corrosao Irritacao ocular aguda.pdf").expect("File not found!"); + println!("{}", file_data.len()); + let mut lexer = Lexer::new(&*file_data); + let file = File::create("/tmp/pdf.txt").unwrap(); + + let mut writer = BufWriter::new(file); + let mut depth = false; + let mut stream = false; + let mut dict = 0; + while let Ok(s) = lexer.next() { + if stream && s.to_string().as_str() == "endstream" { + stream = false; + writer + .write("endstream\n".as_ref()) + .expect("Could not write to buffer"); + continue; + } else if stream { + continue; + } + + match s.to_string().as_str() { + "obj" => depth = true, + "endobj" => depth = false, + "stream" => { + stream = true; + writer + .write("stream ... ".as_ref()) + .expect("Could not write to buffer"); + continue; + } + "<<" => dict += 1, + ">>" => dict -= 1, + _ => (), + } + + writer.write(s.as_ref()).expect("Could not write to buffer"); + if dict == 0 { + writer + .write("\n".as_ref()) + .expect("Could not write to buffer"); + } else { + writer + .write(" ".as_ref()) + .expect("Could not write to buffer"); + } + + match s.to_string().as_str() { + _ => (), + } + } + writer.flush().expect("Could not flush buffer"); + } +} diff --git a/src-pdfrs/pdf/src/parser/lexer/str.rs b/src-pdfrs/pdf/src/parser/lexer/str.rs new file mode 100644 index 0000000..b7b21ff --- /dev/null +++ b/src-pdfrs/pdf/src/parser/lexer/str.rs @@ -0,0 +1,368 @@ +use std::iter::Iterator; +use crate::error::*; + +/// A lexer for PDF strings. Breaks the string up into single characters (`u8`) +/// It's also possible to get the number of indices of the original array that was traversed by the +/// Iterator. 
+/// +/// ``` +/// let mut string: Vec = Vec::new(); +/// let bytes_traversed = { +/// let mut string_lexer = StringLexer::new(lexer.get_remaining_slice()); +/// for character in string_lexer.iter() { +/// let character = character?; +/// string.push(character); +/// } +/// string_lexer.get_offset() as i64 +/// }; +/// // bytes_traversed now holds the number of bytes in the original array traversed. +/// ``` +/// + +#[derive(Clone)] +pub struct StringLexer<'a> { + pos: usize, // points to next byte + nested: i32, // How far in () we are nested + buf: &'a [u8], +} + +impl<'a> StringLexer<'a> { + /// `buf` should start right after the `(` delimiter, and may span all the way to EOF. StringLexer + /// will determine the end of the string. + pub fn new(buf: &'a [u8]) -> StringLexer<'a> { + StringLexer { + pos: 0, + nested: 0, + buf, + } + } + pub fn iter<'b>(&'b mut self) -> StringLexerIter<'a, 'b> { + StringLexerIter {lexer: self} + } + /// Get offset/pos from start of string + pub fn get_offset(&self) -> usize { + self.pos + } + + /// (mostly just used by Iterator, but might be useful) + pub fn next_lexeme(&mut self) -> Result> { + let c = self.next_byte()?; + match c { + b'\\' => { + let c = self.next_byte()?; + Ok( + match c { + b'n' => Some(b'\n'), + b'r' => Some(b'\r'), + b't' => Some(b'\t'), + b'b' => Some(b'\x08'), + b'f' => Some(b'\x0c'), + b'(' => Some(b'('), + b')' => Some(b')'), + b'\n' => { + // ignore end-of-line marker + if let Ok(b'\r') = self.peek_byte() { + let _ = self.next_byte(); + } + self.next_lexeme()? + } + b'\r' => { + // ignore end-of-line marker + if let Ok(b'\n') = self.peek_byte() { + let _ = self.next_byte(); + } + self.next_lexeme()? + } + b'\\' => Some(b'\\'), + + _ => { + self.back()?; + let _start = self.get_offset(); + let mut char_code: u16 = 0; + + // A character code must follow. 1-3 numbers. + for _ in 0..3 { + let c = self.peek_byte()?; + if (b'0'..=b'7').contains(&c) { + self.next_byte()?; + char_code = char_code * 8 + (c - b'0') as u16; + } else { + break; + } + } + Some(char_code as u8) + } + } + ) + }, + + b'(' => { + self.nested += 1; + Ok(Some(b'(')) + }, + b')' => { + self.nested -= 1; + if self.nested < 0 { + Ok(None) + } else { + Ok(Some(b')')) + } + }, + + c => Ok(Some(c)) + + } + } + + fn next_byte(&mut self) -> Result { + if self.pos < self.buf.len() { + self.pos += 1; + Ok(self.buf[self.pos-1]) + } else { + Err(PdfError::EOF) + } + } + fn back(&mut self) -> Result<()> { + if self.pos > 0 { + self.pos -= 1; + Ok(()) + } else { + Err(PdfError::EOF) + } + } + fn peek_byte(&mut self) -> Result { + if self.pos < self.buf.len() { + Ok(self.buf[self.pos]) + } else { + Err(PdfError::EOF) + } + } +} + +// "'a is valid for at least 'b" +pub struct StringLexerIter<'a: 'b, 'b> { + lexer: &'b mut StringLexer<'a>, +} + +impl<'a, 'b> Iterator for StringLexerIter<'a, 'b> { + type Item = Result; + fn next(&mut self) -> Option> { + match self.lexer.next_lexeme() { + Err(e) => Some(Err(e)), + Ok(Some(s)) => Some(Ok(s)), + Ok(None) => None, + } + } +} + +pub struct HexStringLexer<'a> { + pos: usize, // points to next byte + buf: &'a [u8], +} + +impl<'a> HexStringLexer<'a> { + /// `buf` should start right after the `<` delimiter, and may span all the way to EOF. + /// HexStringLexer will determine the end of the string. 
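+    // Illustrative sketch mirroring the unit tests below: the input `901FA3>` decodes
+    // to the bytes [0x90, 0x1F, 0xA3]; interleaved whitespace is skipped, and a trailing
+    // odd digit as in `901FA>` is padded with a zero low nibble, giving [0x90, 0x1F, 0xA0].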
+ pub fn new(buf: &'a [u8]) -> HexStringLexer<'a> { + HexStringLexer { pos: 0, buf } + } + + pub fn iter<'b>(&'b mut self) -> HexStringLexerIter<'a, 'b> { + HexStringLexerIter { lexer: self } + } + + /// Get offset/position from start of string + pub fn get_offset(&self) -> usize { + self.pos + } + + fn next_non_whitespace_char(&mut self) -> Result { + let mut byte = self.read_byte()?; + while byte == b' ' || byte == b'\t' || byte == b'\n' || byte == b'\r' || byte == b'\x0c' { + byte = self.read_byte()?; + } + Ok(byte) + } + + pub fn next_hex_byte(&mut self) -> Result> { + let c1 = self.next_non_whitespace_char()?; + let high_nibble: u8 = match c1 { + b'0' ..= b'9' => c1 - b'0', + b'A' ..= b'F' => c1 - b'A' + 0xA, + b'a' ..= b'f' => c1 - b'a' + 0xA, + b'>' => return Ok(None), + _ => return Err(PdfError::HexDecode { + pos: self.pos, + bytes: [c1, self.peek_byte().unwrap_or(0)] + }), + }; + let c2 = self.next_non_whitespace_char()?; + let low_nibble: u8 = match c2 { + b'0' ..= b'9' => c2 - b'0', + b'A' ..= b'F' => c2 - b'A' + 0xA, + b'a' ..= b'f' => c2 - b'a' + 0xA, + b'>' => { + self.back()?; + 0 + } + _ => return Err(PdfError::HexDecode { + pos: self.pos, + bytes: [c1, c2] + }), + }; + Ok(Some((high_nibble << 4) | low_nibble)) + } + + fn read_byte(&mut self) -> Result { + if self.pos < self.buf.len() { + self.pos += 1; + Ok(self.buf[self.pos - 1]) + } else { + Err(PdfError::EOF) + } + } + + fn back(&mut self) -> Result<()> { + if self.pos > 0 { + self.pos -= 1; + Ok(()) + } else { + Err(PdfError::EOF) + } + } + + fn peek_byte(&mut self) -> Result { + if self.pos < self.buf.len() { + Ok(self.buf[self.pos]) + } else { + Err(PdfError::EOF) + } + } +} + +pub struct HexStringLexerIter<'a: 'b, 'b> { + lexer: &'b mut HexStringLexer<'a>, +} + +impl<'a, 'b> Iterator for HexStringLexerIter<'a, 'b> { + type Item = Result; + + fn next(&mut self) -> Option> { + match self.lexer.next_hex_byte() { + Err(e) => Some(Err(e)), + Ok(Some(s)) => Some(Ok(s)), + Ok(None) => None, + } + } +} + +#[cfg(test)] +mod tests { + use crate::error::Result; + use crate::parser::lexer::{HexStringLexer, StringLexer}; + + #[test] + fn tests() { + let vec = b"a\\nb\\rc\\td\\(f/)\\\\hei)"; + let mut lexer = StringLexer::new(vec); + let lexemes: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(lexemes, b"a\nb\rc\td(f/"); + } + + #[test] + fn string_split_lines() { + { + let data = b"These \\\ntwo strings \\\nare the same.)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"These two strings are the same."); + } + { + let data = b"These \\\rtwo strings \\\rare the same.)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"These two strings are the same."); + } + { + let data = b"These \\\r\ntwo strings \\\r\nare the same.)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"These two strings are the same."); + } + } + + #[test] + fn octal_escape() { + { + let data = b"This string contains\\245two octal characters\\307.)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, &b"This string contains\xa5two octal characters\xc7."[..]); + } + { + let data = b"\\0053)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"\x053"); + } + { + 
let data = b"\\053)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"+"); + } + { + let data = b"\\53)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"+"); + } + { + // overflow is ignored + let data = b"\\541)"; + let mut lexer = StringLexer::new(data); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!(result, b"a"); + } + } + + #[test] + fn hex_test() { + let input = b"901FA3>"; + let mut lexer = HexStringLexer::new(input); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!( + result, + vec![ + b'\x90', + b'\x1f', + b'\xa3', + ] + ); + + let input = b"901FA>"; + let mut lexer = HexStringLexer::new(input); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!( + result, + vec![ + b'\x90', + b'\x1f', + b'\xa0', + ] + ); + + let input = b"1 9F\t5\r\n4\x0c62a>"; + let mut lexer = HexStringLexer::new(input); + let result: Vec = lexer.iter().map(Result::unwrap).collect(); + assert_eq!( + result, + vec![ + b'\x19', + b'\xf5', + b'\x46', + b'\x2a', + ] + ); + } +} diff --git a/src-pdfrs/pdf/src/parser/mod.rs b/src-pdfrs/pdf/src/parser/mod.rs new file mode 100644 index 0000000..5d7b502 --- /dev/null +++ b/src-pdfrs/pdf/src/parser/mod.rs @@ -0,0 +1,405 @@ +//! Basic functionality for parsing a PDF file. + +mod lexer; +mod parse_object; +mod parse_xref; + +pub use self::lexer::*; +pub use self::parse_object::*; +pub use self::parse_xref::*; + +use crate::error::*; +use crate::primitive::StreamInner; +use crate::primitive::{Primitive, Dictionary, PdfStream, PdfString}; +use crate::object::{ObjNr, GenNr, PlainRef, Resolve}; +use crate::crypt::Decoder; +use bitflags::bitflags; +use istring::{SmallBytes, SmallString, IBytes}; + +const MAX_DEPTH: usize = 20; + + +bitflags! { + #[repr(transparent)] + #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] + pub struct ParseFlags: u16 { + const INTEGER = 1 << 0; + const STREAM = 1 << 1; + const DICT = 1 << 2; + const NUMBER = 1 << 3; + const NAME = 1 << 4; + const ARRAY = 1 << 5; + const STRING = 1 << 6; + const BOOL = 1 << 7; + const NULL = 1 << 8; + const REF = 1 << 9; + const ANY = (1 << 10) - 1; + } +} + + +pub struct Context<'a> { + pub decoder: Option<&'a Decoder>, + pub id: PlainRef, +} +impl<'a> Context<'a> { + pub fn decrypt<'buf>(&self, data: &'buf mut [u8]) -> Result<&'buf [u8]> { + if let Some(decoder) = self.decoder { + decoder.decrypt(self.id, data) + } else { + Ok(data) + } + } + #[cfg(test)] + fn fake() -> Self { + Context { + decoder: None, + id: PlainRef { id: 0, gen: 0 } + } + } +} + +/// Can parse stream but only if its dictionary does not contain indirect references. +/// Use `parse_stream` if this is insufficient. +pub fn parse(data: &[u8], r: &impl Resolve, flags: ParseFlags) -> Result { + parse_with_lexer(&mut Lexer::new(data), r, flags) +} + +/// Recursive. Can parse stream but only if its dictionary does not contain indirect references. +/// Use `parse_stream` if this is not sufficient. 
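+// Minimal usage sketch (illustrative, not part of the original file; `NoResolve`
+// only works because the example dictionary contains no indirect references):
+//
+//     use crate::object::NoResolve;
+//     let prim = parse(b"<</Type /Page /Count 3>>", &NoResolve, ParseFlags::DICT)?;
+//     let dict = prim.into_dictionary()?;
+//     assert_eq!(dict.get("Type").unwrap().as_name()?, "Page");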
+pub fn parse_with_lexer(lexer: &mut Lexer, r: &impl Resolve, flags: ParseFlags) -> Result { + parse_with_lexer_ctx(lexer, r, None, flags, MAX_DEPTH) +} + +fn parse_dictionary_object(lexer: &mut Lexer, r: &impl Resolve, ctx: Option<&Context>, max_depth: usize) -> Result { + let mut dict = Dictionary::default(); + loop { + // Expect a Name (and Object) or the '>>' delimiter + let token = t!(lexer.next()); + if token.starts_with(b"/") { + let key = token.reslice(1..).to_name()?; + let obj = t!(parse_with_lexer_ctx(lexer, r, ctx, ParseFlags::ANY, max_depth)); + dict.insert(key, obj); + } else if token.equals(b">>") { + break; + } else { + err!(PdfError::UnexpectedLexeme{ pos: lexer.get_pos(), lexeme: token.to_string(), expected: "/ or >>"}); + } + } + Ok(dict) +} + +fn parse_stream_object(dict: Dictionary, lexer: &mut Lexer, r: &impl Resolve, ctx: &Context) -> Result { + t!(lexer.next_stream()); + + let length = match dict.get("Length") { + Some(&Primitive::Integer(n)) if n >= 0 => n as usize, + Some(&Primitive::Reference(reference)) => t!(t!(r.resolve_flags(reference, ParseFlags::INTEGER, 1)).as_usize()), + Some(other) => err!(PdfError::UnexpectedPrimitive { expected: "unsigned Integer or Reference", found: other.get_debug_name() }), + None => err!(PdfError::MissingEntry { typ: "", field: "Length".into() }), + }; + + let stream_substr = lexer.read_n(length); + + if stream_substr.len() != length { + err!(PdfError::EOF) + } + + // Finish + t!(lexer.next_expect("endstream")); + + Ok(PdfStream { + inner: StreamInner::InFile { + id: ctx.id, + file_range: stream_substr.file_range(), + }, + info: dict, + }) +} + +#[inline] +fn check(flags: ParseFlags, allowed: ParseFlags) -> Result<(), PdfError> { + if !flags.intersects(allowed) { + return Err(PdfError::PrimitiveNotAllowed { allowed, found: flags }); + } + Ok(()) +} + +/// Recursive. Can parse stream but only if its dictionary does not contain indirect references. +/// Use `parse_stream` if this is not sufficient. +pub fn parse_with_lexer_ctx(lexer: &mut Lexer, r: &impl Resolve, ctx: Option<&Context>, flags: ParseFlags, max_depth: usize) -> Result { + let pos = lexer.get_pos(); + match _parse_with_lexer_ctx(lexer, r, ctx, flags, max_depth) { + Ok(r) => Ok(r), + Err(e) => { + lexer.set_pos(pos); + Err(e) + } + } +} +fn _parse_with_lexer_ctx(lexer: &mut Lexer, r: &impl Resolve, ctx: Option<&Context>, flags: ParseFlags, max_depth: usize) -> Result { + + let input = lexer.get_remaining_slice(); + let first_lexeme = t!(lexer.next(), std::str::from_utf8(input)); + + let obj = if first_lexeme.equals(b"<<") { + check(flags, ParseFlags::DICT)?; + + if max_depth == 0 { + return Err(PdfError::MaxDepth); + } + let dict = t!(parse_dictionary_object(lexer, r, ctx, max_depth-1)); + // It might just be the dictionary in front of a stream. 
+ if t!(lexer.peek()).equals(b"stream") { + let ctx = ctx.ok_or(PdfError::PrimitiveNotAllowed { allowed: ParseFlags::STREAM, found: flags })?; + Primitive::Stream(t!(parse_stream_object(dict, lexer, r, ctx))) + } else { + Primitive::Dictionary(dict) + } + } else if first_lexeme.is_integer() { + // May be Integer or Reference + check(flags, ParseFlags::INTEGER | ParseFlags::REF)?; + + // First backup position + let pos_bk = lexer.get_pos(); + + let second_lexeme = t!(lexer.next()); + if second_lexeme.is_integer() { + let third_lexeme = t!(lexer.next()); + if third_lexeme.equals(b"R") { + // It is indeed a reference to an indirect object + check(flags, ParseFlags::REF)?; + Primitive::Reference (PlainRef { + id: t!(first_lexeme.to::()), + gen: t!(second_lexeme.to::()), + }) + } else { + check(flags, ParseFlags::INTEGER)?; + // We are probably in an array of numbers - it's not a reference anyway + lexer.set_pos(pos_bk); // (roll back the lexer first) + Primitive::Integer(t!(first_lexeme.to::())) + } + } else { + check(flags, ParseFlags::INTEGER)?; + // It is but a number + lexer.set_pos(pos_bk); // (roll back the lexer first) + Primitive::Integer(t!(first_lexeme.to::())) + } + } else if let Some(s) = first_lexeme.real_number() { + check(flags, ParseFlags::NUMBER)?; + // Real Number + Primitive::Number (t!(s.to::(), s.to_string())) + } else if first_lexeme.starts_with(b"/") { + check(flags, ParseFlags::NAME)?; + // Name + + let mut rest: &[u8] = &first_lexeme.reslice(1..); + let s = if rest.contains(&b'#') { + let mut s = IBytes::new(); + while let Some(idx) = rest.iter().position(|&b| b == b'#') { + use crate::enc::decode_nibble; + use std::convert::TryInto; + let [hi, lo]: [u8; 2] = rest.get(idx+1 .. idx+3).ok_or(PdfError::EOF)?.try_into().unwrap(); + let byte = match (decode_nibble(lo), decode_nibble(hi)) { + (Some(low), Some(high)) => low | high << 4, + _ => return Err(PdfError::HexDecode { pos: idx, bytes: [hi, lo] }), + }; + s.extend_from_slice(&rest[..idx]); + s.push(byte); + rest = &rest[idx+3..]; + } + s.extend_from_slice(rest); + SmallBytes::from(s.as_slice()) + } else { + SmallBytes::from(rest) + }; + + Primitive::Name(SmallString::from_utf8(s)?) 
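+            // e.g. (sketch): a lexeme such as `/Adobe#20Green` has its `#20` escape
+            // decoded above, producing the name "Adobe Green".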
+ } else if first_lexeme.equals(b"[") { + check(flags, ParseFlags::ARRAY)?; + if max_depth == 0 { + return Err(PdfError::MaxDepth); + } + let mut array = Vec::new(); + // Array + loop { + // Exit if closing delimiter + if lexer.peek()?.equals(b"]") { + break; + } + + let element = t!(parse_with_lexer_ctx(lexer, r, ctx, ParseFlags::ANY, max_depth-1)); + array.push(element); + } + t!(lexer.next()); // Move beyond closing delimiter + + Primitive::Array (array) + } else if first_lexeme.equals(b"(") { + check(flags, ParseFlags::STRING)?; + let mut string = IBytes::new(); + + let bytes_traversed = { + let mut string_lexer = StringLexer::new(lexer.get_remaining_slice()); + for character in string_lexer.iter() { + string.push(t!(character)); + } + string_lexer.get_offset() + }; + // Advance to end of string + lexer.offset_pos(bytes_traversed); + // decrypt it + if let Some(ctx) = ctx { + string = t!(ctx.decrypt(&mut string)).into(); + } + Primitive::String (PdfString::new(string)) + } else if first_lexeme.equals(b"<") { + check(flags, ParseFlags::STRING)?; + let mut string = IBytes::new(); + + let bytes_traversed = { + let mut hex_string_lexer = HexStringLexer::new(lexer.get_remaining_slice()); + for byte in hex_string_lexer.iter() { + string.push(t!(byte)); + } + hex_string_lexer.get_offset() + }; + // Advance to end of string + lexer.offset_pos(bytes_traversed); + + // decrypt it + if let Some(ctx) = ctx { + string = t!(ctx.decrypt(&mut string)).into(); + } + Primitive::String (PdfString::new(string)) + } else if first_lexeme.equals(b"true") { + check(flags, ParseFlags::BOOL)?; + Primitive::Boolean (true) + } else if first_lexeme.equals(b"false") { + check(flags, ParseFlags::BOOL)?; + Primitive::Boolean (false) + } else if first_lexeme.equals(b"null") { + check(flags, ParseFlags::NULL)?; + Primitive::Null + } else { + err!(PdfError::UnknownType {pos: lexer.get_pos(), first_lexeme: first_lexeme.to_string(), rest: lexer.read_n(50).to_string()}); + }; + + // trace!("Read object"; "Obj" => format!("{}", obj)); + + Ok(obj) +} + + +pub fn parse_stream(data: &[u8], resolve: &impl Resolve, ctx: &Context) -> Result { + parse_stream_with_lexer(&mut Lexer::new(data), resolve, ctx) +} + + +fn parse_stream_with_lexer(lexer: &mut Lexer, r: &impl Resolve, ctx: &Context) -> Result { + let first_lexeme = t!(lexer.next()); + + let obj = if first_lexeme.equals(b"<<") { + let dict = t!(parse_dictionary_object(lexer, r, None, MAX_DEPTH)); + // It might just be the dictionary in front of a stream. 
+ if t!(lexer.peek()).equals(b"stream") { + let ctx = Context { + decoder: None, + id: ctx.id + }; + t!(parse_stream_object(dict, lexer, r, &ctx)) + } else { + err!(PdfError::UnexpectedPrimitive { expected: "Stream", found: "Dictionary" }); + } + } else { + err!(PdfError::UnexpectedPrimitive { expected: "Stream", found: "something else" }); + }; + + Ok(obj) +} + +#[cfg(test)] +mod tests { + #[test] + fn dict_with_empty_name_as_value() { + use crate::object::NoResolve; + use super::{ParseFlags, Context}; + { + let data = b"<>>>"; + let primitive = super::parse(data, &NoResolve, ParseFlags::DICT).unwrap(); + let dict = primitive.into_dictionary().unwrap(); + + assert_eq!(dict.len(), 1); + let app_dict = dict.get("App").unwrap().clone().into_dictionary().unwrap(); + assert_eq!(app_dict.len(), 1); + let name = app_dict.get("Name").unwrap().as_name().unwrap(); + assert_eq!(name, ""); + } + + { + let data = b"<>>>stream\nendstream\n"; + let stream = super::parse_stream(data, &NoResolve, &Context::fake()).unwrap(); + let dict = stream.info; + + assert_eq!(dict.len(), 2); + let app_dict = dict.get("App").unwrap().clone().into_dictionary().unwrap(); + assert_eq!(app_dict.len(), 1); + let name = app_dict.get("Name").unwrap().as_name().unwrap(); + assert_eq!(name, ""); + } + } + + #[test] + fn dict_with_empty_name_as_key() { + use crate::object::NoResolve; + use super::{ParseFlags, Context}; + + { + let data = b"<>"; + let primitive = super::parse(data, &NoResolve, ParseFlags::DICT).unwrap(); + let dict = primitive.into_dictionary().unwrap(); + + assert_eq!(dict.len(), 1); + assert!(dict.get("").unwrap().as_bool().unwrap()); + } + + { + let data = b"<>stream\nendstream\n"; + let stream = super::parse_stream(data, &NoResolve, &Context::fake()).unwrap(); + let dict = stream.info; + + assert_eq!(dict.len(), 2); + assert!(dict.get("").unwrap().as_bool().unwrap()); + } + } + + #[test] + fn empty_array() { + use crate::object::NoResolve; + use super::ParseFlags; + + let data = b"[]"; + let primitive = super::parse(data, &NoResolve, ParseFlags::ARRAY).unwrap(); + let array = primitive.into_array().unwrap(); + assert!(array.is_empty()); + } + + #[test] + fn compact_array() { + use crate::object::NoResolve; + use crate::primitive::{Primitive, PdfString}; + use super::lexer::Lexer; + use super::*; + let mut lx = Lexer::new(b"[(Complete L)20(egend for Physical and P)20(olitical Maps)]TJ"); + assert_eq!(parse_with_lexer(&mut lx, &NoResolve, ParseFlags::ANY).unwrap(), + Primitive::Array(vec![ + Primitive::String(PdfString::new("Complete L".into())), + Primitive::Integer(20), + Primitive::String(PdfString::new("egend for Physical and P".into())), + Primitive::Integer(20), + Primitive::String(PdfString::new("olitical Maps".into())) + ]) + ); + assert_eq!(lx.next().unwrap().as_str().unwrap(), "TJ"); + assert!(lx.next().unwrap_err().is_eof()); + } +} diff --git a/src-pdfrs/pdf/src/parser/parse_object.rs b/src-pdfrs/pdf/src/parser/parse_object.rs new file mode 100644 index 0000000..52ba5f6 --- /dev/null +++ b/src-pdfrs/pdf/src/parser/parse_object.rs @@ -0,0 +1,56 @@ +// Considering whether to impl Object and IndirectObject here. +// + +use crate::parser::{lexer::*, MAX_DEPTH}; +use crate::error::*; +use crate::primitive::{Primitive, PdfStream}; +use crate::parser::{parse_with_lexer_ctx, parse_stream_with_lexer, Context, ParseFlags}; +use crate::object::*; +use crate::crypt::Decoder; + +/// Parses an Object starting at the current position of `lexer`. 
Almost as +/// `Reader::parse_object`, but this function does not take `Reader`, at the expense that it +/// cannot dereference + +pub fn parse_indirect_object(lexer: &mut Lexer, r: &impl Resolve, decoder: Option<&Decoder>, flags: ParseFlags) -> Result<(PlainRef, Primitive)> { + let id = PlainRef { + id: t!(lexer.next()).to::()?, + gen: t!(lexer.next()).to::()?, + }; + lexer.next_expect("obj")?; + + let ctx = Context { + decoder, + id, + }; + let obj = t!(parse_with_lexer_ctx(lexer, r, Some(&ctx), flags, MAX_DEPTH)); + + if r.options().allow_missing_endobj { + let pos = lexer.get_pos(); + if let Err(e) = lexer.next_expect("endobj") { + warn!("error parsing obj {} {}: {:?}", id.id, id.gen, e); + lexer.set_pos(pos); + } + } else { + t!(lexer.next_expect("endobj")); + } + + Ok((id, obj)) +} +pub fn parse_indirect_stream(lexer: &mut Lexer, r: &impl Resolve, decoder: Option<&Decoder>) -> Result<(PlainRef, PdfStream)> { + let id = PlainRef { + id: t!(lexer.next()).to::()?, + gen: t!(lexer.next()).to::()?, + }; + lexer.next_expect("obj")?; + + let ctx = Context { + decoder, + id, + }; + let stm = t!(parse_stream_with_lexer(lexer, r, &ctx)); + + t!(lexer.next_expect("endobj")); + + Ok((id, stm)) +} diff --git a/src-pdfrs/pdf/src/parser/parse_xref.rs b/src-pdfrs/pdf/src/parser/parse_xref.rs new file mode 100644 index 0000000..de287fd --- /dev/null +++ b/src-pdfrs/pdf/src/parser/parse_xref.rs @@ -0,0 +1,144 @@ +use crate::error::*; +use crate::parser::lexer::Lexer; +use crate::xref::{XRef, XRefSection, XRefInfo}; +use crate::primitive::{Primitive, Dictionary}; +use crate::object::*; +use crate::parser::{parse_with_lexer, ParseFlags}; +use crate::parser::parse_object::{parse_indirect_stream}; +use std::convert::TryInto; + +// Just the part of Parser which reads xref sections from xref stream. +/// Takes `&mut &[u8]` so that it can "consume" data as it reads +fn parse_xref_section_from_stream(first_id: u32, mut num_entries: usize, width: &[usize], data: &mut &[u8], resolve: &impl Resolve) -> Result { + let mut entries = Vec::new(); + let [w0, w1, w2]: [usize; 3] = width.try_into().map_err(|_| other!("invalid xref length array"))?; + if num_entries * (w0 + w1 + w2) > data.len() { + if resolve.options().allow_xref_error { + warn!("not enough xref data. truncating."); + num_entries = data.len() / (w0 + w1 + w2); + } else { + bail!("not enough xref data"); + } + } + for _ in 0..num_entries { + // println!("{:?}", &data[.. width.iter().map(|&i| i as usize).sum()]); + // TODO Check if width[i] are 0. Use default values from the PDF references. + let _type = if w0 == 0 { + 1 + } else { + read_u64_from_stream(w0, data)? + }; + let field1 = read_u64_from_stream(w1, data)?; + let field2 = read_u64_from_stream(w2, data)?; + + let entry = + match _type { + 0 => XRef::Free {next_obj_nr: field1 as ObjNr, gen_nr: field2 as GenNr}, + 1 => XRef::Raw {pos: field1 as usize, gen_nr: field2 as GenNr}, + 2 => XRef::Stream {stream_id: field1 as ObjNr, index: field2 as usize}, + _ => return Err(PdfError::XRefStreamType {found: _type}), // TODO: Should actually just be seen as a reference to the null object + }; + entries.push(entry); + } + Ok(XRefSection { + first_id, + entries, + }) +} +/// Helper to read an integer with a certain amount of bytes `width` from stream. 
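+// Worked example (illustrative): with `width == 3` and `data` beginning with the bytes
+// [0x01, 0x02, 0x03], the loop below accumulates big-endian: (0x01 << 16) + (0x02 << 8)
+// + 0x03 = 66051, and `data` is advanced by three bytes.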
+fn read_u64_from_stream(width: usize, data: &mut &[u8]) -> Result { + if width > std::mem::size_of::() { + return Err(PdfError::Other { msg: format!("xref stream entry has invalid width {}", width) }); + } + if width > data.len() { + return Err(PdfError::Other { msg: format!("xref stream entry has width {} but only {} bytes left to read", width, data.len()) }); + } + let mut result = 0; + for i in (0..width).rev() { + let base = 8 * i; // (width, 0] + let c: u8 = data[0]; + *data = &data[1..]; // Consume byte + result += u64::from(c) << base; + } + Ok(result) +} + + +/// Reads xref sections (from stream) and trailer starting at the position of the Lexer. +pub fn parse_xref_stream_and_trailer(lexer: &mut Lexer, resolve: &impl Resolve) -> Result<(Vec, Dictionary)> { + let xref_stream = t!(parse_indirect_stream(lexer, resolve, None)).1; + let trailer = if t!(lexer.next()) == "trailer" { + let trailer = t!(parse_with_lexer(lexer, resolve, ParseFlags::DICT)); + t!(trailer.into_dictionary()) + } else { + xref_stream.info.clone() + }; + + let xref_stream = t!(Stream::::from_primitive(Primitive::Stream(xref_stream), resolve)); + let mut data_left = &*t!(xref_stream.data(resolve)); + + let width = &xref_stream.w; + + let index = &xref_stream.index; + + if index.len() % 2 != 0 { + return Err(PdfError::Other { msg: format!("xref stream has {} elements which is not an even number", index.len()) }); + } + + let mut sections = Vec::new(); + for (first_id, num_objects) in index.chunks_exact(2).map(|c| (c[0], c[1])) { + let section = t!(parse_xref_section_from_stream(first_id, num_objects as usize, width, &mut data_left, resolve)); + sections.push(section); + } + + Ok((sections, trailer)) +} + + +/// Reads xref sections (from table) and trailer starting at the position of the Lexer. +pub fn parse_xref_table_and_trailer(lexer: &mut Lexer, resolve: &impl Resolve) -> Result<(Vec, Dictionary)> { + let mut sections = Vec::new(); + + // Keep reading subsections until we hit `trailer` + while lexer.peek()? 
!= "trailer" { + let start_id = t!(lexer.next_as::()); + let num_ids = t!(lexer.next_as::()); + + let mut section = XRefSection::new(start_id); + + for i in 0..num_ids { + let w1 = t!(lexer.next()); + if w1 == "trailer" { + return Err(PdfError::Other { msg: format!("xref table declares {} entries, but only {} follow.", num_ids, i) }); + } + let w2 = t!(lexer.next()); + let w3 = t!(lexer.next()); + if w3 == "f" { + section.add_free_entry(t!(w1.to::()), t!(w2.to::())); + } else if w3 == "n" { + section.add_inuse_entry(t!(w1.to::()), t!(w2.to::())); + } else { + return Err(PdfError::UnexpectedLexeme {pos: lexer.get_pos(), lexeme: w3.to_string(), expected: "f or n"}); + } + } + sections.push(section); + } + + t!(lexer.next_expect("trailer")); + let trailer = t!(parse_with_lexer(lexer, resolve, ParseFlags::DICT)); + let trailer = t!(trailer.into_dictionary()); + + Ok((sections, trailer)) +} + +pub fn read_xref_and_trailer_at(lexer: &mut Lexer, resolve: &impl Resolve) -> Result<(Vec, Dictionary)> { + let next_word = t!(lexer.next()); + if next_word == "xref" { + // Read classic xref table + parse_xref_table_and_trailer(lexer, resolve) + } else { + // Read xref stream + lexer.back()?; + parse_xref_stream_and_trailer(lexer, resolve) + } +} diff --git a/src-pdfrs/pdf/src/path.rs b/src-pdfrs/pdf/src/path.rs new file mode 100644 index 0000000..68621e4 --- /dev/null +++ b/src-pdfrs/pdf/src/path.rs @@ -0,0 +1,82 @@ +use mint::Point2; +type Point = Point2; + +pub enum FillMode { + NonZero, + EvenOdd +} + +struct PathBuilder { + out: W, + current: Point +} +impl PathBuilder { + pub fn new

<P>(writer: W, start: P) -> PathBuilder<W> + where P: Into<Point> + { + PathBuilder { + out: writer, + current: start.into() + } + } + + /// Begin a new subpath by moving the current point to `p`, + /// omitting any connecting line segment. If + /// the previous path construction operator in the current path + /// was also m, the new m overrides it; no vestige of the + /// previous m operation remains in the path. + pub fn move_to<P: Into<Point>>
(&mut self, p: P) { + let p = p.into(); + writeln!(self.out, "{} {} m", p.x, p.y); + self.current = p; + } + /// Append a straight line segment from the current point to the + /// point `p`. The new current point shall be `p`. + pub fn line<P: Into<Point>>
(&mut self, p: P) { + let p = p.into(); + writeln!(self.out, "{} {} l", p.x, p.y); + self.current = p; + } + + /// Append a quadratic Bézier curve to the current path. + /// The curve shall extend from the current point to the point `p`, + /// using `c` as the Bézier control point. + /// The new current point shall be `p`. + /// + /// NOTE: The quadratic Bézier curve is translated into a cubic Bézier curve, + /// since PDF does not allow the former. + pub fn quadratic<P: Into<Point>>
(&mut self, c: P, p: P) { + let (c, p) = (c.into(), p.into()); + let c1 = (2./3.) * c + (1./3.) * self.current; + let c2 = (2./3.) * c + (1./3.) * p; + writeln!(self.out, "{} {} {} {} {} {} c", c1.x, c1.y, c2.x, c2.y, p.x, p.y); + self.current = p; + } + + /// Append a cubic Bézier curve to the current path. + /// The curve shall extend from the current point to the point `p`, + /// using `c1` and `c2` as the Bézier control points. + /// The new current point shall be `p`. + pub fn cubic<P: Into<Point>>
(&mut self, c1: P, c2: P, p: P) { + let (c1, c2, p) = (c1.into(), c2.into(), p.into()); + if Some(c1) == self.current { + writeln!(self.out, "{} {} {} {} v", c2.x, c2.y, p.x, p.y); + } else if Some(c2) == self.current { + writeln!(self.out, "{} {} {} {} y", c1.x, c1.y, p.x, p.y); + } else { + writen!(self.out, "{} {} {} {} {} {} c", c1.x, c1.y, c2.x, c2.y, p.x, p.y); + } + self.current = p; + } + + pub fn close(&mut self) { + writeln!(self.out, "h"); + } + + pub fn fill(&mut self, mode: FillMode) { + match mode { + FillMode::NonZero => writeln!(out, "f"), + FillMode::EvenOdd => writeln!(out, "f*") + } + } +} diff --git a/src-pdfrs/pdf/src/primitive.rs b/src-pdfrs/pdf/src/primitive.rs new file mode 100644 index 0000000..00b51bd --- /dev/null +++ b/src-pdfrs/pdf/src/primitive.rs @@ -0,0 +1,897 @@ +use crate::error::*; +use crate::object::{PlainRef, Resolve, Object, NoResolve, ObjectWrite, Updater, DeepClone, Cloner}; + +use std::sync::Arc; +use std::{str, fmt, io}; +use std::ops::{Index, Range}; +use std::ops::Deref; +use std::convert::TryInto; +use std::borrow::{Borrow, Cow}; +use indexmap::IndexMap; +use itertools::Itertools; +use istring::{SmallString, IBytes}; +use datasize::DataSize; + +#[derive(Clone, Debug, PartialEq)] +pub enum Primitive { + Null, + Integer (i32), + Number (f32), + Boolean (bool), + String (PdfString), + Stream (PdfStream), + Dictionary (Dictionary), + Array (Vec), + Reference (PlainRef), + Name (SmallString), +} +impl DataSize for Primitive { + const IS_DYNAMIC: bool = true; + const STATIC_HEAP_SIZE: usize = std::mem::size_of::(); + + fn estimate_heap_size(&self) -> usize { + match self { + Primitive::String(ref s) => s.estimate_heap_size(), + Primitive::Stream(ref s) => s.estimate_heap_size(), + Primitive::Dictionary(ref d) => d.estimate_heap_size(), + Primitive::Array(ref arr) => arr.estimate_heap_size(), + Primitive::Name(ref s) => s.estimate_heap_size(), + _ => 0 + } + } +} + +impl fmt::Display for Primitive { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Primitive::Null => write!(f, "null"), + Primitive::Integer(i) => i.fmt(f), + Primitive::Number(n) => n.fmt(f), + Primitive::Boolean(b) => b.fmt(f), + Primitive::String(ref s) => write!(f, "{:?}", s), + Primitive::Stream(_) => write!(f, "stream"), + Primitive::Dictionary(ref d) => d.fmt(f), + Primitive::Array(ref arr) => write!(f, "[{}]", arr.iter().format(", ")), + Primitive::Reference(r) => write!(f, "@{}", r.id), + Primitive::Name(ref s) => write!(f, "/{}", s) + } + } +} +impl Primitive { + pub fn serialize(&self, out: &mut impl io::Write) -> Result<()> { + match self { + Primitive::Null => write!(out, "null")?, + Primitive::Integer(i) => write!(out, "{}", i)?, + Primitive::Number(n) => write!(out, "{}", n)?, + Primitive::Boolean(b) => write!(out, "{}", b)?, + Primitive::String(ref s) => s.serialize(out)?, + Primitive::Stream(ref s) => s.serialize(out)?, + Primitive::Dictionary(ref d) => d.serialize(out)?, + Primitive::Array(ref arr) => serialize_list(arr, out)?, + Primitive::Reference(r) => write!(out, "{} {} R", r.id, r.gen)?, + Primitive::Name(ref s) => serialize_name(s, out)?, + } + Ok(()) + } + pub fn array(i: I, update: &mut U) -> Result + where O: ObjectWrite, I: Iterator, + T: Borrow, U: Updater + { + i.map(|t| t.borrow().to_primitive(update)).collect::>().map(Primitive::Array) + } + pub fn name(name: impl Into) -> Primitive { + Primitive::Name(name.into()) + } +} + +fn serialize_list(arr: &[Primitive], out: &mut impl io::Write) -> Result<()> { + let mut parts = arr.iter(); 
+ write!(out, "[")?; + if let Some(first) = parts.next() { + first.serialize(out)?; + } + for p in parts { + write!(out, " ")?; + p.serialize(out)?; + } + write!(out, "]")?; + Ok(()) +} + +pub fn serialize_name(s: &str, out: &mut impl io::Write) -> Result<()> { + write!(out, "/")?; + for b in s.chars() { + match b { + '\\' | '(' | ')' => write!(out, r"\")?, + c if c > '~' => panic!("only ASCII"), + _ => () + } + write!(out, "{}", b)?; + } + Ok(()) +} + +/// Primitive Dictionary type. +#[derive(Default, Clone, PartialEq)] +pub struct Dictionary { + dict: IndexMap +} +impl Dictionary { + pub fn new() -> Dictionary { + Dictionary { dict: IndexMap::new()} + } + pub fn len(&self) -> usize { + self.dict.len() + } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + pub fn get(&self, key: &str) -> Option<&Primitive> { + self.dict.get(key) + } + pub fn insert(&mut self, key: impl Into, val: impl Into) -> Option { + self.dict.insert(key.into(), val.into()) + } + pub fn iter(&self) -> impl Iterator { + self.dict.iter() + } + pub fn remove(&mut self, key: &str) -> Option { + self.dict.remove(key) + } + /// like remove, but takes the name of the calling type and returns `PdfError::MissingEntry` if the entry is not found + pub fn require(&mut self, typ: &'static str, key: &str) -> Result { + self.remove(key).ok_or( + PdfError::MissingEntry { + typ, + field: key.into() + } + ) + } + /// assert that the given key/value pair is in the dictionary (`required=true`), + /// or the key is not present at all (`required=false`) + pub fn expect(&self, typ: &'static str, key: &str, value: &str, required: bool) -> Result<()> { + match self.dict.get(key) { + Some(ty) => { + let ty = ty.as_name()?; + if ty != value { + Err(PdfError::KeyValueMismatch { + key: key.into(), + value: value.into(), + found: ty.into() + }) + } else { + Ok(()) + } + }, + None if required => Err(PdfError::MissingEntry { typ, field: key.into() }), + None => Ok(()) + } + } + pub fn append(&mut self, other: Dictionary) { + self.dict.extend(other.dict); + } +} +impl DataSize for Dictionary { + const IS_DYNAMIC: bool = true; + const STATIC_HEAP_SIZE: usize = std::mem::size_of::(); + fn estimate_heap_size(&self) -> usize { + self.iter().map(|(k, v)| 16 + k.estimate_heap_size() + v.estimate_heap_size()).sum() + } +} +impl ObjectWrite for Dictionary { + fn to_primitive(&self, _update: &mut impl Updater) -> Result { + Ok(Primitive::Dictionary(self.clone())) + } +} +impl DeepClone for Dictionary { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + Ok(Dictionary { + dict: self.dict.iter() + .map(|(key, value)| Ok((key.clone(), value.deep_clone(cloner)?))) + .try_collect::<_, _, PdfError>()? 
+ }) + } +} +impl Deref for Dictionary { + type Target = IndexMap; + fn deref(&self) -> &IndexMap { + &self.dict + } +} +impl Dictionary { + fn serialize(&self, out: &mut impl io::Write) -> Result<()> { + writeln!(out, "<<")?; + for (key, val) in self.iter() { + write!(out, "{} ", key)?; + val.serialize(out)?; + writeln!(out)?; + } + writeln!(out, ">>")?; + Ok(()) + } +} +impl fmt::Debug for Dictionary { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + writeln!(f, "{{")?; + for (k, v) in self { + writeln!(f, "{:>15}: {}", k, v)?; + } + write!(f, "}}") + } +} +impl fmt::Display for Dictionary { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "<{}>", self.iter().format_with(", ", |(k, v), f| f(&format_args!("{}={}", k, v)))) + } +} +impl<'a> Index<&'a str> for Dictionary { + type Output = Primitive; + fn index(&self, idx: &'a str) -> &Primitive { + self.dict.index(idx) + } +} +impl IntoIterator for Dictionary { + type Item = (Name, Primitive); + type IntoIter = indexmap::map::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self.dict.into_iter() + } +} +impl<'a> IntoIterator for &'a Dictionary { + type Item = (&'a Name, &'a Primitive); + type IntoIter = indexmap::map::Iter<'a, Name, Primitive>; + fn into_iter(self) -> Self::IntoIter { + self.dict.iter() + } +} + +/// Primitive Stream (as opposed to the higher-level `Stream`) +#[derive(Clone, Debug, PartialEq, DataSize)] +pub struct PdfStream { + pub info: Dictionary, + pub (crate) inner: StreamInner, +} + +#[derive(Clone, Debug, PartialEq, DataSize)] +pub enum StreamInner { + InFile { id: PlainRef, file_range: Range }, + Pending { data: Arc<[u8]> }, +} +impl Object for PdfStream { + fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result { + match p { + Primitive::Stream (stream) => Ok(stream), + Primitive::Reference (r) => PdfStream::from_primitive(resolve.resolve(r)?, resolve), + p => Err(PdfError::UnexpectedPrimitive {expected: "Stream", found: p.get_debug_name()}) + } + } +} +impl ObjectWrite for PdfStream { + fn to_primitive(&self, update: &mut impl Updater) -> Result { + Ok(self.clone().into()) + } +} +impl PdfStream { + pub fn serialize(&self, out: &mut impl io::Write) -> Result<()> { + self.info.serialize(out)?; + + writeln!(out, "stream")?; + match self.inner { + StreamInner::InFile { .. } => { + unimplemented!() + } + StreamInner::Pending { ref data } => { + out.write_all(data)?; + } + } + writeln!(out, "\nendstream")?; + Ok(()) + } + pub fn raw_data(&self, resolve: &impl Resolve) -> Result> { + match self.inner { + StreamInner::InFile { id, ref file_range } => resolve.stream_data(id, file_range.clone()), + StreamInner::Pending { ref data } => Ok(data.clone()) + } + } +} +impl DeepClone for PdfStream { + fn deep_clone(&self, cloner: &mut impl Cloner) -> Result { + let data = match self.inner { + StreamInner::InFile { id, ref file_range } => cloner.stream_data(id, file_range.clone())?, + StreamInner::Pending { ref data } => data.clone() + }; + Ok(PdfStream { + info: self.info.deep_clone(cloner)?, inner: StreamInner::Pending { data } + }) + } +} + + +macro_rules! 
unexpected_primitive { + ($expected:ident, $found:expr) => ( + Err(PdfError::UnexpectedPrimitive { + expected: stringify!($expected), + found: $found + }) + ) +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Ord, PartialOrd, DataSize)] +pub struct Name(pub SmallString); +impl Name { + #[inline] + pub fn as_str(&self) -> &str { + &self.0 + } +} +impl Deref for Name { + type Target = str; + #[inline] + fn deref(&self) -> &str { + &self.0 + } +} +impl From for Name { + #[inline] + fn from(s: String) -> Name { + Name(s.into()) + } +} +impl From for Name { + #[inline] + fn from(s: SmallString) -> Name { + Name(s) + } +} +impl<'a> From<&'a str> for Name { + #[inline] + fn from(s: &'a str) -> Name { + Name(s.into()) + } +} +impl PartialEq for Name { + #[inline] + fn eq(&self, rhs: &str) -> bool { + self.as_str() == rhs + } +} +impl fmt::Display for Name { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "/{}", self.0) + } +} +impl std::borrow::Borrow for Name { + #[inline] + fn borrow(&self) -> &str { + self.0.as_str() + } +} +#[test] +fn test_name() { + use std::collections::hash_map::DefaultHasher; + use std::hash::{Hash, Hasher}; + + let s = "Hello World!"; + let hasher = DefaultHasher::new(); + + fn hash(hasher: &DefaultHasher, value: impl Hash) -> u64 { + let mut hasher = hasher.clone(); + value.hash(&mut hasher); + hasher.finish() + } + assert_eq!(hash(&hasher, Name(s.into())), hash(&hasher, s)); +} + +/// Primitive String type. +#[derive(Clone, PartialEq, Eq, Hash, DataSize)] +pub struct PdfString { + pub data: IBytes, +} +impl fmt::Debug for PdfString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "\"")?; + for &b in self.data.as_slice() { + match b { + b'"' => write!(f, "\\\"")?, + b' ' ..= b'~' => write!(f, "{}", b as char)?, + o @ 0 ..= 7 => write!(f, "\\{}", o)?, + x => write!(f, "\\x{:02x}", x)? + } + } + write!(f, "\"") + } +} +impl Object for PdfString { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p { + Primitive::String (string) => Ok(string), + Primitive::Reference(id) => PdfString::from_primitive(r.resolve(id)?, &NoResolve), + _ => unexpected_primitive!(String, p.get_debug_name()), + } + } +} +impl ObjectWrite for PdfString { + fn to_primitive(&self, _update: &mut impl Updater) -> Result { + Ok(Primitive::String(self.clone())) + } +} + +impl PdfString { + pub fn serialize(&self, out: &mut impl io::Write) -> Result<()> { + if self.data.iter().any(|&b| b >= 0x80) { + write!(out, "<")?; + for &b in self.data.as_slice() { + write!(out, "{:02x}", b)?; + } + write!(out, ">")?; + } else { + write!(out, r"(")?; + for &b in self.data.as_slice() { + match b { + b'\\' | b'(' | b')' => write!(out, r"\")?, + _ => () + } + out.write_all(&[b])?; + } + write!(out, r")")?; + } + Ok(()) + } +} +impl AsRef<[u8]> for PdfString { + fn as_ref(&self) -> &[u8] { + self.as_bytes() + } +} + +impl PdfString { + pub fn new(data: IBytes) -> PdfString { + PdfString { + data + } + } + pub fn as_bytes(&self) -> &[u8] { + &self.data + } + pub fn into_bytes(self) -> IBytes { + self.data + } + /// without encoding information the PdfString cannot be decoded into a String + /// therefore only lossy decoding is possible replacing unknown characters. 
+ /// For decoding correctly see + /// pdf_tools/src/lib.rs + pub fn to_string_lossy(&self) -> String { + if self.data.starts_with(&[0xfe, 0xff]) { + crate::font::utf16be_to_string_lossy(&self.data[2..]) + } + else { + String::from_utf8_lossy(&self.data).into() + } + } + /// without encoding information the PdfString cannot be sensibly decoded into a String + /// converts to a Rust String but only works for valid UTF-8, UTF-16BE and ASCII characters + /// if invalid bytes found an Error is returned + pub fn to_string(&self) -> Result { + if self.data.starts_with(&[0xfe, 0xff]) { + Ok(String::from(std::str::from_utf8(crate::font::utf16be_to_string(&self.data[2..])?.as_bytes()) + .map_err(|_| PdfError::Utf8Decode)?)) + } + else { + Ok(String::from(std::str::from_utf8(&self.data) + .map_err(|_| PdfError::Utf8Decode)?)) + } + } +} +impl<'a> From<&'a str> for PdfString { + fn from(value: &'a str) -> Self { + PdfString { data: value.into() } + } +} + +// TODO: +// Noticed some inconsistency here.. I think to_* and as_* should not take Resolve, and not accept +// Reference. Only from_primitive() for the respective type resolves References. +impl Primitive { + /// For debugging / error messages: get the name of the variant + pub fn get_debug_name(&self) -> &'static str { + match *self { + Primitive::Null => "Null", + Primitive::Integer (..) => "Integer", + Primitive::Number (..) => "Number", + Primitive::Boolean (..) => "Boolean", + Primitive::String (..) => "String", + Primitive::Stream (..) => "Stream", + Primitive::Dictionary (..) => "Dictionary", + Primitive::Array (..) => "Array", + Primitive::Reference (..) => "Reference", + Primitive::Name (..) => "Name", + } + } + /// resolve the primitive if it is a refernce, otherwise do nothing + pub fn resolve(self, r: &impl Resolve) -> Result { + match self { + Primitive::Reference(id) => r.resolve(id), + _ => Ok(self) + } + } + pub fn as_integer(&self) -> Result { + match *self { + Primitive::Integer(n) => Ok(n), + ref p => unexpected_primitive!(Integer, p.get_debug_name()) + } + } + pub fn as_u8(&self) -> Result { + match *self { + Primitive::Integer(n) if (0..256).contains(&n) => Ok(n as u8), + Primitive::Integer(_) => bail!("invalid integer"), + ref p => unexpected_primitive!(Integer, p.get_debug_name()) + } + } + pub fn as_u32(&self) -> Result { + match *self { + Primitive::Integer(n) if n >= 0 => Ok(n as u32), + Primitive::Integer(_) => bail!("negative integer"), + ref p => unexpected_primitive!(Integer, p.get_debug_name()) + } + } + pub fn as_usize(&self) -> Result { + match *self { + Primitive::Integer(n) if n >= 0 => Ok(n as usize), + Primitive::Integer(_) => bail!("negative integer"), + ref p => unexpected_primitive!(Integer, p.get_debug_name()) + } + } + pub fn as_number(&self) -> Result { + match *self { + Primitive::Integer(n) => Ok(n as f32), + Primitive::Number(f) => Ok(f), + ref p => unexpected_primitive!(Number, p.get_debug_name()) + } + } + pub fn as_bool(&self) -> Result { + match *self { + Primitive::Boolean (b) => Ok(b), + ref p => unexpected_primitive!(Number, p.get_debug_name()) + } + } + pub fn as_name(&self) -> Result<&str> { + match self { + Primitive::Name(ref name) => Ok(name.as_str()), + p => unexpected_primitive!(Name, p.get_debug_name()) + } + } + pub fn as_string(&self) -> Result<&PdfString> { + match self { + Primitive::String(ref data) => Ok(data), + p => unexpected_primitive!(String, p.get_debug_name()) + } + } + pub fn as_array(&self) -> Result<&[Primitive]> { + match self { + Primitive::Array(ref v) => Ok(v), + p 
=> unexpected_primitive!(Array, p.get_debug_name()) + } + } + pub fn into_reference(self) -> Result { + match self { + Primitive::Reference(id) => Ok(id), + p => unexpected_primitive!(Reference, p.get_debug_name()) + } + } + pub fn into_array(self) -> Result> { + match self { + Primitive::Array(v) => Ok(v), + p => unexpected_primitive!(Array, p.get_debug_name()) + } + } + pub fn into_dictionary(self) -> Result { + match self { + Primitive::Dictionary(dict) => Ok(dict), + p => unexpected_primitive!(Dictionary, p.get_debug_name()) + } + } + pub fn into_name(self) -> Result { + match self { + Primitive::Name(name) => Ok(Name(name)), + p => unexpected_primitive!(Name, p.get_debug_name()) + } + } + pub fn into_string(self) -> Result { + match self { + Primitive::String(data) => Ok(data), + p => unexpected_primitive!(String, p.get_debug_name()) + } + } + pub fn to_string_lossy(&self) -> Result { + let s = self.as_string()?; + Ok(s.to_string_lossy()) + } + pub fn to_string(&self) -> Result { + let s = self.as_string()?; + s.to_string() + } + pub fn into_stream(self, _r: &impl Resolve) -> Result { + match self { + Primitive::Stream (s) => Ok(s), + p => unexpected_primitive!(Stream, p.get_debug_name()) + } + } +} + +impl From for Primitive { + fn from(x: i32) -> Primitive { + Primitive::Integer(x) + } +} +impl From for Primitive { + fn from(x: f32) -> Primitive { + Primitive::Number(x) + } +} +impl From for Primitive { + fn from(x: bool) -> Primitive { + Primitive::Boolean(x) + } +} +impl From for Primitive { + fn from(Name(s): Name) -> Primitive { + Primitive::Name(s) + } +} +impl From for Primitive { + fn from(x: PdfString) -> Primitive { + Primitive::String (x) + } +} +impl From for Primitive { + fn from(x: PdfStream) -> Primitive { + Primitive::Stream (x) + } +} +impl From for Primitive { + fn from(x: Dictionary) -> Primitive { + Primitive::Dictionary (x) + } +} +impl From> for Primitive { + fn from(x: Vec) -> Primitive { + Primitive::Array (x) + } +} + +impl From for Primitive { + fn from(x: PlainRef) -> Primitive { + Primitive::Reference (x) + } +} +impl<'a> TryInto for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result { + self.as_number() + } +} +impl<'a> TryInto for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result { + self.as_integer() + } +} +impl<'a> TryInto for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result { + match self { + Primitive::Name(s) => Ok(Name(s.clone())), + p => Err(PdfError::UnexpectedPrimitive { + expected: "Name", + found: p.get_debug_name() + }) + } + } +} +impl<'a> TryInto<&'a [Primitive]> for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result<&'a [Primitive]> { + self.as_array() + } +} +impl<'a> TryInto<&'a [u8]> for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result<&'a [u8]> { + match *self { + Primitive::Name(ref s) => Ok(s.as_bytes()), + Primitive::String(ref s) => Ok(s.as_bytes()), + ref p => Err(PdfError::UnexpectedPrimitive { + expected: "Name or String", + found: p.get_debug_name() + }) + } + } +} +impl<'a> TryInto> for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result> { + match *self { + Primitive::Name(ref s) => Ok(Cow::Borrowed(s)), + Primitive::String(ref s) => Ok(Cow::Owned(s.to_string_lossy())), + ref p => Err(PdfError::UnexpectedPrimitive { + expected: "Name or String", + found: p.get_debug_name() + }) + } + } +} +impl<'a> TryInto for &'a Primitive { + type Error = PdfError; + fn try_into(self) -> Result { + match *self { + 
Primitive::Name(ref s) => Ok(s.as_str().into()), + Primitive::String(ref s) => Ok(s.to_string_lossy()), + ref p => Err(PdfError::UnexpectedPrimitive { + expected: "Name or String", + found: p.get_debug_name() + }) + } + } +} + +fn parse_or(buffer: &str, range: Range, default: T) -> T { + buffer.get(range) + .map(|s| str::parse::(s).unwrap_or_else(|_| default.clone())) + .unwrap_or(default) +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Date { + pub year: u16, + pub month: u8, + pub day: u8, + pub hour: u8, + pub minute: u8, + pub second: u8, + pub rel: TimeRel, + pub tz_hour: u8, + pub tz_minute: u8, +} + +#[derive(Clone, Debug, Copy, PartialEq, Eq)] +pub enum TimeRel { + Earlier, + Later, + Universal +} +datasize::non_dynamic_const_heap_size!(Date, std::mem::size_of::()); + +impl Object for Date { + fn from_primitive(p: Primitive, r: &impl Resolve) -> Result { + match p.resolve(r)? { + Primitive::String (PdfString {data}) => { + let s = str::from_utf8(&data)?; + if s.starts_with("D:") { + let year = match s.get(2..6) { + Some(year) => { + str::parse::(year)? + } + None => bail!("Missing obligatory year in date") + }; + + let (time, rel, zone) = match s.find(['+', '-', 'Z']) { + Some(p) => { + let rel = match &s[p..p+1] { + "-" => TimeRel::Earlier, + "+" => TimeRel::Later, + "Z" => TimeRel::Universal, + _ => unreachable!() + }; + (&s[..p], rel, &s[p+1..]) + } + None => (s, TimeRel::Universal, "") + }; + + let month = parse_or(time, 6..8, 1); + let day = parse_or(time, 8..10, 1); + let hour = parse_or(time, 10..12, 0); + let minute = parse_or(time, 12..14, 0); + let second = parse_or(time, 14..16, 0); + let tz_hour = parse_or(zone, 0..2, 0); + let tz_minute = parse_or(zone, 3..5, 0); + + Ok(Date { + year, month, day, + hour, minute, second, + tz_hour, tz_minute, + rel + }) + } else { + bail!("Failed parsing date"); + } + } + p => unexpected_primitive!(String, p.get_debug_name()), + } + } +} + +impl ObjectWrite for Date { + fn to_primitive(&self, _update: &mut impl Updater) -> Result { + let Date { + year, month, day, + hour, minute, second, + tz_hour, tz_minute, rel, + } = *self; + if year > 9999 || day > 99 || hour > 23 || minute >= 60 || second >= 60 || tz_hour >= 24 || tz_minute >= 60 { + bail!("not a valid date"); + } + let o = match rel { + TimeRel::Earlier => "-", + TimeRel::Later => "+", + TimeRel::Universal => "Z" + }; + + let s = format!("D:{year:04}{month:02}{day:02}{hour:02}{minute:02}{second:02}{o}{tz_hour:02}'{tz_minute:02}"); + Ok(Primitive::String(PdfString { data: s.into() })) + } +} + +#[cfg(test)] +mod tests { + use crate::{primitive::{PdfString, TimeRel}, object::{NoResolve, Object}}; + + use super::Date; + #[test] + fn utf16be_string() { + let s = PdfString::new([0xfe, 0xff, 0x20, 0x09].as_slice().into()); + assert_eq!(s.to_string_lossy(), "\u{2009}"); + } + + #[test] + fn utf16be_invalid_string() { + let s = PdfString::new([0xfe, 0xff, 0xd8, 0x34].as_slice().into()); + let repl_ch = String::from(std::char::REPLACEMENT_CHARACTER); + assert_eq!(s.to_string_lossy(), repl_ch); + } + + #[test] + fn utf16be_invalid_bytelen() { + let s = PdfString::new([0xfe, 0xff, 0xd8, 0x34, 0x20].as_slice().into()); + let repl_ch = String::from(std::char::REPLACEMENT_CHARACTER); + assert_eq!(s.to_string_lossy(), repl_ch); + } + + #[test] + fn pdfstring_lossy_vs_ascii() { + // verify UTF-16-BE fails on invalid + let s = PdfString::new([0xfe, 0xff, 0xd8, 0x34].as_slice().into()); + assert!(s.to_string().is_err()); // FIXME verify it is a PdfError::Utf16Decode + // verify UTF-16-BE 
supports umlauts + let s = PdfString::new([0xfe, 0xff, 0x00, 0xe4 /*ä*/].as_slice().into()); + assert_eq!(s.to_string_lossy(), "ä"); + assert_eq!(s.to_string().unwrap(), "ä"); + // verify valid UTF-8 bytestream with umlaut works + let s = PdfString::new([b'm', b'i', b't', 0xc3, 0xa4 /*ä*/].as_slice().into()); + assert_eq!(s.to_string_lossy(), "mitä"); + assert_eq!(s.to_string().unwrap(), "mitä"); + // verify valid ISO-8859-1 bytestream with umlaut fails + let s = PdfString::new([b'm', b'i', b't', 0xe4/*ä in latin1*/].as_slice().into()); + let repl_ch = ['m', 'i', 't', std::char::REPLACEMENT_CHARACTER].iter().collect::(); + assert_eq!(s.to_string_lossy(), repl_ch); + assert!(s.to_string().is_err()); // FIXME verify it is a PdfError::Utf16Decode + } + + #[test] + fn date() { + let p = PdfString::from("D:199812231952-08'00"); + let d = Date::from_primitive(p.into(), &NoResolve); + + let d2 = Date { + year: 1998, + month: 12, + day: 23, + hour: 19, + minute: 52, + second: 00, + rel: TimeRel::Earlier, + tz_hour: 8, + tz_minute: 0 + }; + assert_eq!(d.unwrap(), d2); + } +} diff --git a/src-pdfrs/pdf/src/repair.rs b/src-pdfrs/pdf/src/repair.rs new file mode 100644 index 0000000..3352bd1 --- /dev/null +++ b/src-pdfrs/pdf/src/repair.rs @@ -0,0 +1,42 @@ + +fn build_xref_table() { + warn!("can't read xref table: {:?}", e); + let start_offset = t!(backend.locate_start_offset()); + let mut lexer = Lexer::new(t!(backend.read(..))); + let mut objects = Vec::new(); + + (|| -> Result<()> { loop { + let offset = lexer.get_pos(); + let w1 = t!(lexer.next()); + let w2 = t!(lexer.next()); + let w3 = t!(lexer.next_expect("obj")); + try_opt!(lexer.seek_substr("endobj")); + + objects.push((t!(w1.to::()), t!(w2.to::()), offset)); + }})(); + + objects.sort_unstable(); + let mut first_id = objects.first().map(|&(n, _, _)| n).unwrap_or(0); + let mut last_id = objects.last().map(|&(n, _, _)| n).unwrap_or(0); + let mut xref = XRefTable::new(1 + last_id - first_id); + for &(obj_nr, gen_nr, offset) in objects.iter() { + for n in first_id + 1 .. obj_nr { + xref.push(XRef::Free { next_obj_nr: obj_nr, gen_nr: 0 }); + } + if obj_nr == last_id { + warn!("duplicate obj_nr {}", obj_nr); + continue; + } + xref.push(XRef::Raw { + pos: offset - start_offset, + gen_nr + }); + last_id = obj_nr; + } + + return t!(Err(e)); +} + +fn build_catalog() { + +} diff --git a/src-pdfrs/pdf/src/xref.rs b/src-pdfrs/pdf/src/xref.rs new file mode 100644 index 0000000..c40cd6b --- /dev/null +++ b/src-pdfrs/pdf/src/xref.rs @@ -0,0 +1,237 @@ +use std::fmt::{Debug, Formatter}; +use crate::error::*; +use crate::object::*; +use crate as pdf; +use datasize::DataSize; + +/////////////////////////// +// Cross-reference table // +/////////////////////////// + +#[derive(Copy, Clone, Debug)] +pub enum XRef { + /// Not currently used. + Free { + next_obj_nr: ObjNr, + gen_nr: GenNr + }, + + /// In use. + Raw { + pos: usize, + gen_nr: GenNr + }, + /// In use and compressed inside an Object Stream + Stream { + stream_id: ObjNr, + index: usize, + }, + + Promised, + + Invalid +} + +impl XRef { + pub fn get_gen_nr(&self) -> GenNr { + match *self { + XRef::Free {gen_nr, ..} + | XRef::Raw {gen_nr, ..} => gen_nr, + XRef::Stream { .. } => 0, // TODO I think these always have gen nr 0? + _ => panic!() + } + } +} + + +/// Runtime lookup table of all objects +#[derive(Clone)] +pub struct XRefTable { + // None means that it's not specified, and should result in an error if used + // Thought: None could also mean Free? 
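+    // Indexed by object number; `XRef::Invalid` marks entries that were never filled in.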
+ entries: Vec +} + + +impl XRefTable { + pub fn new(num_objects: ObjNr) -> XRefTable { + let mut entries = Vec::new(); + entries.resize(num_objects as usize, XRef::Invalid); + entries.push(XRef::Free { next_obj_nr: 0, gen_nr: 0xffff }); + XRefTable { + entries, + } + } + + pub fn iter(&self) -> impl Iterator + '_ { + self.entries.iter().enumerate() + .filter(|(_, xref)| matches!(xref, XRef::Raw { .. } | XRef::Stream { .. } )) + .map(|(i, _)| i as u32) + } + pub fn iter_real(&self) -> impl Iterator + '_ { + self.entries.iter() + } + + pub fn get(&self, id: ObjNr) -> Result { + match self.entries.get(id as usize) { + Some(&entry) => Ok(entry), + None => Err(PdfError::UnspecifiedXRefEntry {id}), + } + } + pub fn set(&mut self, id: ObjNr, r: XRef) { + self.entries[id as usize] = r; + } + pub fn len(&self) -> usize { + self.entries.len() + } + pub fn is_empty(&self) -> bool { + self.entries.is_empty() + } + pub fn push(&mut self, new_entry: XRef) { + self.entries.push(new_entry); + } + pub fn num_entries(&self) -> usize { + self.entries.len() + } + pub fn max_field_widths(&self) -> (u64, u64) { + let mut max_a = 0; + let mut max_b = 0; + for &e in &self.entries { + let (a, b) = match e { + XRef::Raw { pos, gen_nr } => (pos as u64, gen_nr), + XRef::Free { next_obj_nr, gen_nr } => (next_obj_nr, gen_nr), + XRef::Stream { stream_id, index } => (stream_id, index as u64), + _ => continue + }; + max_a = max_a.max(a); + max_b = max_b.max(b); + } + (max_a, max_b) + } + + pub fn add_entries_from(&mut self, section: XRefSection) -> Result<()> { + for (i, &entry) in section.entries() { + if let Some(dst) = self.entries.get_mut(i) { + // Early return if the entry we have has larger or equal generation number + let should_be_updated = match *dst { + XRef::Raw { gen_nr: gen, .. } | XRef::Free { gen_nr: gen, .. } + => entry.get_gen_nr() > gen, + XRef::Stream { .. } | XRef::Invalid + => true, + x => bail!("found {:?}", x) + }; + if should_be_updated { + *dst = entry; + } + } + } + Ok(()) + } + + pub fn write_stream(&self, size: usize) -> Result> { + let (max_a, max_b) = self.max_field_widths(); + let a_w = byte_len(max_a); + let b_w = byte_len(max_b); + + let mut data = Vec::with_capacity((1 + a_w + b_w) * size); + for &x in self.entries.iter().take(size) { + let (t, a, b) = match x { + XRef::Free { next_obj_nr, gen_nr } => (0, next_obj_nr, gen_nr), + XRef::Raw { pos, gen_nr } => (1, pos as u64, gen_nr), + XRef::Stream { stream_id, index } => (2, stream_id, index as u64), + x => bail!("invalid xref entry: {:?}", x) + }; + data.push(t); + data.extend_from_slice(&a.to_be_bytes()[8 - a_w ..]); + data.extend_from_slice(&b.to_be_bytes()[8 - b_w ..]); + } + let info = XRefInfo { + size: size as u32, + index: vec![0, size as u32], + prev: None, + w: vec![1, a_w, b_w], + }; + + Ok(Stream::new(info, data)) + } +} + +fn byte_len(n: u64) -> usize { + (64 + 8 - 1 - n.leading_zeros()) as usize / 8 + (n == 0) as usize +} + +impl Debug for XRefTable { + fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { + for (i, entry) in self.entries.iter().enumerate() { + match *entry { + XRef::Free {next_obj_nr, gen_nr} => { + writeln!(f, "{:4}: {:010} {:05} f", i, next_obj_nr, gen_nr)? + }, + XRef::Raw {pos, gen_nr} => { + writeln!(f, "{:4}: {:010} {:05} n", i, pos, gen_nr)? + }, + XRef::Stream {stream_id, index} => { + writeln!(f, "{:4}: in stream {}, index {}", i, stream_id, index)? + }, + XRef::Promised => { + writeln!(f, "{:4}: Promised?", i)? + }, + XRef::Invalid => { + writeln!(f, "{:4}: Invalid!", i)? 
+ } + } + } + Ok(()) + } +} + +/// As found in PDF files +#[derive(Debug)] +pub struct XRefSection { + pub first_id: u32, + pub entries: Vec, +} + + +impl XRefSection { + pub fn new(first_id: u32) -> XRefSection { + XRefSection { + first_id, + entries: Vec::new(), + } + } + pub fn add_free_entry(&mut self, next_obj_nr: ObjNr, gen_nr: GenNr) { + self.entries.push(XRef::Free{next_obj_nr, gen_nr}); + } + pub fn add_inuse_entry(&mut self, pos: usize, gen_nr: GenNr) { + self.entries.push(XRef::Raw{pos, gen_nr}); + } + pub fn entries(&self) -> impl Iterator { + self.entries.iter().enumerate().map(move |(i, e)| (i + self.first_id as usize, e)) + } +} + + +#[derive(Object, ObjectWrite, Debug, DataSize)] +#[pdf(Type = "XRef")] +pub struct XRefInfo { + // XRefStream fields + #[pdf(key = "Size")] + pub size: u32, + + // + #[pdf(key = "Index", default = "vec![0, size]")] + /// Array of pairs of integers for each subsection, (first object number, number of entries). + /// Default value (assumed when None): `(0, self.size)`. + pub index: Vec, + + #[pdf(key = "Prev")] + prev: Option, + + #[pdf(key = "W")] + pub w: Vec, +} + +// read_xref_table +// read_xref_stream +// read_xref_and_trailer_at diff --git a/src-pdfrs/pdf/tests/integration.rs b/src-pdfrs/pdf/tests/integration.rs new file mode 100644 index 0000000..9ce9dad --- /dev/null +++ b/src-pdfrs/pdf/tests/integration.rs @@ -0,0 +1,126 @@ +use std::str; +use std::path::{Path, PathBuf}; +use pdf::file::FileOptions; +use pdf::object::*; +use pdf::parser::{parse, ParseFlags}; +use glob::glob; + +macro_rules! run { + ($e:expr) => ( + match $e { + Ok(v) => v, + Err(e) => { + panic!("{}", e); + } + } + ) +} + +fn files() -> PathBuf { + Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().join("files") +} +fn file_path(s: &str) -> PathBuf { + files().join(s) +} +fn dir_pdfs(path: PathBuf) -> impl Iterator { + path.read_dir().unwrap() + .filter_map(|r| r.ok()) + .map(|e| e.path()) + .filter(|p| p.extension().map(|e| e == "pdf").unwrap_or(false)) +} + +#[test] +fn open_file() { + let _ = run!(FileOptions::uncached().open(file_path("example.pdf"))); + #[cfg(all(feature = "mmap", feature = "cache"))] + let _ = run!({ + use memmap2::Mmap; + let file = std::fs::File::open(file_path!("example.pdf")).expect("can't open file"); + let mmap = unsafe { Mmap::map(&file).expect("can't mmap file") }; + FileOptions::cached().load(mmap) + }); +} + +#[cfg(feature="cache")] +#[test] +fn read_pages() { + for path in dir_pdfs(files()) { + println!("\n == Now testing `{}` ==", path.to_str().unwrap()); + + let path = path.to_str().unwrap(); + let file = run!(FileOptions::cached().open(path)); + for i in 0 .. file.num_pages() { + println!("Read page {}", i); + let _ = file.get_page(i); + } + } +} + +#[test] +fn user_password() { + for path in dir_pdfs(file_path("password_protected")) { + println!("\n\n == Now testing `{}` ==\n", path.to_str().unwrap()); + + let path = path.to_str().unwrap(); + let file = run!(FileOptions::uncached().password(b"userpassword").open(path)); + for i in 0 .. file.num_pages() { + println!("\nRead page {}", i); + let _ = file.get_page(i); + } + } +} + +#[test] +fn owner_password() { + for path in dir_pdfs(file_path("password_protected")) { + println!("\n\n == Now testing `{}` ==\n", path.to_str().unwrap()); + + let path = path.to_str().unwrap(); + let file = run!(FileOptions::uncached().password(b"ownerpassword").open(path)); + for i in 0 .. 
file.num_pages() { + println!("\nRead page {}", i); + let _ = file.get_page(i); + } + } +} + +// Test for invalid PDFs found by fuzzing. +// We don't care if they give an Err or Ok, as long as they don't panic. +#[cfg(feature="cache")] +#[test] +fn invalid_pdfs() { + for path in dir_pdfs(file_path("invalid")) { + let path = path.to_str().unwrap(); + println!("\n\n == Now testing `{}` ==\n", path); + + match FileOptions::cached().open(path) { + Ok(file) => { + for i in 0 .. file.num_pages() { + let _ = file.get_page(i); + } + } + Err(_) => { + continue; + } + } + } +} + +#[cfg(feature="cache")] +#[test] +fn parse_objects_from_stream() { + use pdf::object::NoResolve; + let file = run!(FileOptions::cached().open(file_path("xelatex.pdf"))); + let resolver = file.resolver(); + + // .. we know that object 13 of that file is an ObjectStream + let obj_stream: RcRef = run!(resolver.get(Ref::new(PlainRef {id: 13, gen: 0}))); + for i in 0..obj_stream.n_objects() { + let (data, range) = run!(obj_stream.get_object_slice(i, &resolver)); + let slice = &data[range]; + println!("Object slice #{}: {}\n", i, str::from_utf8(slice).unwrap()); + run!(parse(slice, &NoResolve, ParseFlags::ANY)); + } +} + +// TODO test decoding diff --git a/src-pdfrs/pdf/tests/write.rs b/src-pdfrs/pdf/tests/write.rs new file mode 100644 index 0000000..133853a --- /dev/null +++ b/src-pdfrs/pdf/tests/write.rs @@ -0,0 +1,35 @@ +// TODO: commented out to make it compile +/* +extern crate pdf; + +use pdf::file::File; +use pdf::types::*; +use pdf::stream::ObjectStream; + +fn main() { + let mut file = File::new(Vec::new()); + + let page_tree_promise = file.promise(); + let mut page_tree = PageTree::root(); + let mut page = Page::new((&page_tree_promise).into()); + page.media_box = Some(Rect { + left: 0., + right: 100., + top: 0., + bottom: 200. + }); + + // create the content stream + let content = ObjectStream::new(&mut file); + + // add stream to file + let content_ref = file.add(content); + + page_tree.add(file.add(PagesNode::Page(page)).unwrap()); + + let catalog = Catalog::new(file.fulfill(page_tree_promise, page_tree).unwrap()); + + let catalog_ref = file.add(catalog).unwrap(); + file.finish(catalog_ref); +} +*/ diff --git a/src-pdfrs/pdf/tests/xref.rs b/src-pdfrs/pdf/tests/xref.rs new file mode 100644 index 0000000..03a9010 --- /dev/null +++ b/src-pdfrs/pdf/tests/xref.rs @@ -0,0 +1,12 @@ +use pdf::file::FileOptions; + +#[test] +fn infinite_loop_invalid_file() { + assert!(FileOptions::uncached().load(b"startxref%PDF-".as_ref()).is_err()); +} + +#[test] +fn ending_angle_bracket() { + assert!(FileOptions::uncached().load(b"%PDF-startxref>".as_ref()).is_err()); + assert!(FileOptions::uncached().load(b"%PDF-startxref<".as_ref()).is_err()); +} diff --git a/src-pdfrs/pdf_derive/Cargo.toml b/src-pdfrs/pdf_derive/Cargo.toml new file mode 100644 index 0000000..4d6bff1 --- /dev/null +++ b/src-pdfrs/pdf_derive/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "pdf_derive" +version = "0.2.0" +authors = ["Erlend Langseth <3rlendhl@gmail.com>", "Sebastian Köln "] +homepage = "https://github.com/pdf-rs" +repository = "https://github.com/pdf-rs/pdf_derive" +description = "helper for pdf-rs." 
+license = "MIT" +edition = "2018" +publish = false + +[dependencies] +syn = { version = "2", features = ["full", "extra-traits"] } +proc-macro2 = "1.0.24" +quote = "1" + +[lib] +proc-macro = true diff --git a/src-pdfrs/pdf_derive/src/lib.rs b/src-pdfrs/pdf_derive/src/lib.rs new file mode 100644 index 0000000..861ca97 --- /dev/null +++ b/src-pdfrs/pdf_derive/src/lib.rs @@ -0,0 +1,845 @@ +//! `pdf_derive` provides a proc macro to derive the Object trait from the `pdf` crate. +//! # Usage +//! There are several ways to derive Object on a struct or enum: +//! ## 1. Struct from PDF Dictionary +//! +//! A lot of dictionary types defined in the PDF 1.7 reference have a finite amount of possible +//! fields. Each of these are usually either required or optional. The latter is achieved by using +//! a `Option` or `Vec` as type of a field. +//! +//! Usually, dictionary types +//! require that the entry `/Type` is some specific string. By default, `pdf_derive` assumes that +//! this should equal the name of the input struct. This can be overridden by setting the `Type` +//! attribute equal to either the expected value of the `/Type` entry, or to `false` in order to +//! omit the type check completely. +//! +//! Check similar to that of `/Type` can also be specified in the same manner. (but the `Type` +//! attribute is special because it accepts a bool). +//! +//! Examples: +//! +//! ```ignore +//! #[derive(Object)] +//! #[pdf(Type="XObject", Subtype="Image")] +//! /// A variant of XObject +//! pub struct ImageDictionary { +//! #[pdf(key="Width")] +//! width: i32, +//! #[pdf(key="Height")] +//! height: i32, +//! // [...] +//! } +//! ``` +//! +//! This enforces that the dictionary's `/Type` entry is present and equals `/XObject`, and that the +//! `/Subtype` entry is present and equals `/Image`. +//! +//! Each field in the struct needs to implement `Object`. Implementation is provided already for +//! common types like i32, f32, usize, bool, String (from Primitive::Name), Option and Vec. +//! The two latter are initialized to default if the entry isn't found in the input dictionary. +//! Option is therefore frequently used for fields that are optional according to the PDF +//! reference. Vec can also be used for optional fields that can also be arrays (there are quite +//! a few of those in the PDF specs - one or many). However, as stated, it accepts absense of the +//! entry, so **required** fields of type array aren't yet facilitated for. +//! +//! Lastly, for each field, it's possible to define a default value by setting the `default` +//! attribute to a string that can parse as Rust code. +//! +//! Example: +//! +//! ```ignore +//! #[derive(Object)] +//! #[pdf(Type = "XRef")] +//! pub struct XRefInfo { +//! #[pdf(key = "Filter")] +//! filter: Vec, +//! #[pdf(key = "Size")] +//! pub size: i32, +//! #[pdf(key = "Index", default = "vec![0, size]")] +//! pub index: Vec, +//! // [...] +//! } +//! ``` +//! +//! +//! ## 2. Struct from PDF Stream +//! PDF Streams consist of a stream dictionary along with the stream itself. It is assumed that all +//! structs that want to derive Object where the primitive it converts from is a stream, +//! have a field `info: T`, where `T: Object`, and a field `data: Vec`. +//! +//! Deriving an Object that converts from Primitive::Stream, the flag `is_stream` is required in +//! the proc macro attributes. +//! +//! ## 3. Enum from PDF Name +//! Example: +//! +//! ```ignore +//! #[derive(Object, Debug)] +//! pub enum StreamFilter { +//! ASCIIHexDecode, +//! 
ASCII85Decode, +//! LZWDecode, +//! FlateDecode, +//! JPXDecode, +//! DCTDecode, +//! } +//! ``` +//! +//! In this case, `StreamFilter::from_primitive(primitive)` will return Ok(_) only if the primitive +//! is `Primitive::Name` and matches one of the enum variants +#![recursion_limit="128"] + +extern crate proc_macro; +extern crate syn; +#[macro_use] +extern crate quote; + +use proc_macro::{TokenStream}; +use proc_macro2::{TokenStream as TokenStream2, Span}; +use syn::*; +type SynStream = TokenStream2; + +// Debugging: +/* +use std::fs::{OpenOptions}; +use std::io::Write; +*/ + + + + + + +#[proc_macro_derive(Object, attributes(pdf))] +pub fn object(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + // Build the impl + impl_object(&ast) +} + +#[proc_macro_derive(ObjectWrite, attributes(pdf))] +pub fn objectwrite(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + // Build the impl + impl_objectwrite(&ast) +} + +#[proc_macro_derive(DeepClone, attributes(pdf))] +pub fn deepclone(input: TokenStream) -> TokenStream { + let ast = parse_macro_input!(input as DeriveInput); + + // Build the impl + impl_deepclone(&ast) +} + + +#[derive(Default)] +struct FieldAttrs { + key: Option, + default: Option, + name: Option, + skip: bool, + other: bool, + indirect: bool, +} +impl FieldAttrs { + fn new() -> FieldAttrs { + FieldAttrs { + key: None, + default: None, + name: None, + skip: false, + other: false, + indirect: false, + } + } + fn key(&self) -> &LitStr { + self.key.as_ref().expect("no 'key' in field attributes") + } + fn default(&self) -> Option { + self.default.as_ref().map(|s| parse_str(&s.value()).expect("can't parse `default` as EXPR")) + } + fn parse(list: &[Attribute]) -> FieldAttrs { + let mut attrs = FieldAttrs::new(); + for attr in list.iter().filter(|attr| attr.path().is_ident("pdf")) { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("key") { + let value = meta.value()?; + attrs.key = Some(value.parse()?); + return Ok(()); + } + + if meta.path.is_ident("default") { + let value = meta.value()?; + attrs.default = Some(value.parse()?); + return Ok(()); + } + + if meta.path.is_ident("name") { + let value = meta.value()?; + attrs.name = Some(value.parse()?); + return Ok(()); + } + + if meta.path.is_ident("skip") { + attrs.skip = true; + return Ok(()); + } + + if meta.path.is_ident("other") { + attrs.other = true; + return Ok(()); + } + + if meta.path.is_ident("indirect") { + attrs.indirect = true; + return Ok(()); + } + + Err(meta.error("unsupported key")) + }).expect("parse error"); + } + attrs + } +} + + +/// Just the attributes for the whole struct +#[derive(Default, Debug)] +struct GlobalAttrs { + /// List of checks to do in the dictionary (LHS is the key, RHS is the expected value) + checks: Vec<(String, String)>, + type_name: Option, + type_required: bool, + is_stream: bool +} +impl GlobalAttrs { + /// The PDF type may be explicitly specified as an attribute with type "Type". Else, it is the name + /// of the struct. + fn from_ast(ast: &DeriveInput) -> GlobalAttrs { + let mut attrs = GlobalAttrs::default(); + + for attr in ast.attrs.iter().filter(|attr| attr.path().is_ident("pdf")) { + attr.parse_nested_meta(|meta| { + if meta.path.is_ident("Type") { + let value = meta.value()?; + let lit = value.parse()?; + match lit { + Lit::Str(ref value) => { + let mut value = value.value(); + attrs.type_required = if value.ends_with('?') { + value.pop(); // remove '?' 
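+                            // a trailing '?' marks the /Type check as optional (type_required = false)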
+ false + } else { + true + }; + attrs.type_name = Some(value); + }, + _ => panic!("Value of 'Type' attribute must be a String."), + }; + return Ok(()) + } + + if meta.path.is_ident("is_stream") { + attrs.is_stream = true; + return Ok(()) + } + + if let Ok(value) = meta.value() { + let path = &meta.path; + let lit = value.parse()?; + match lit { + Lit::Str(ref value) => { + let segments = path.segments + .iter() + .map(|s| s.ident.to_string()) + .collect::>() + .join("::"); + attrs.checks.push((segments, value.value())); + } + _ => panic!("Other checks must have RHS String."), + }; + return Ok(()) + } + + Ok(()) + }).expect("error with global attrs parsing"); + } + + attrs + } +} + +fn impl_object(ast: &DeriveInput) -> TokenStream { + let attrs = GlobalAttrs::from_ast(ast); + match (attrs.is_stream, &ast.data) { + (true, Data::Struct(ref data)) => impl_object_for_stream(ast, &data.fields).into(), + (false, Data::Struct(ref data)) => impl_object_for_struct(ast, &data.fields).into(), + (true, Data::Enum(ref variants)) => impl_enum_from_stream(ast, variants, &attrs).into(), + (false, Data::Enum(ref variants)) => impl_object_for_enum(ast, variants).into(), + (_, _) => unimplemented!() + } +} +fn impl_objectwrite(ast: &DeriveInput) -> TokenStream { + let attrs = GlobalAttrs::from_ast(ast); + match (attrs.is_stream, &ast.data) { + (false, Data::Struct(ref data)) => impl_objectwrite_for_struct(ast, &data.fields).into(), + (false, Data::Enum(ref variants)) => impl_objectwrite_for_enum(ast, variants).into(), + (_, _) => unimplemented!() + } +} +fn impl_deepclone(ast: &DeriveInput) -> TokenStream { + let attrs = GlobalAttrs::from_ast(ast); + match &ast.data { + Data::Struct(ref data) => impl_deepclone_for_struct(ast, &data.fields).into(), + Data::Enum(ref variants) => impl_deepclone_for_enum(ast, variants).into(), + _ => unimplemented!() + } +} + +fn enum_pairs(ast: &DeriveInput, data: &DataEnum) -> (Vec<(String, TokenStream2)>, Option) { + let id = &ast.ident; + + let mut pairs = Vec::with_capacity(data.variants.len()); + let mut other = None; + + for var in data.variants.iter() { + let attrs = FieldAttrs::parse(&var.attrs); + let var_ident = &var.ident; + let name = attrs + .name + .map(|lit| lit.value()) + .unwrap_or_else(|| var_ident.to_string()); + if attrs.other { + assert!(other.is_none(), "only one 'other' variant is allowed in a name enum"); + + match &var.fields { + Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {} + _ => { + panic!( + "the 'other' variant in a name enum should have exactly one unnamed field", + ); + } + } + other = Some(quote! { #id::#var_ident }); + } else { + pairs.push((name, quote! { #id::#var_ident })); + } + } + + (pairs, other) +} + + +/// Accepts Name to construct enum +fn impl_object_for_enum(ast: &DeriveInput, data: &DataEnum) -> SynStream { + let id = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let int_count = data.variants.iter().filter(|var| var.discriminant.is_some()).count(); + if int_count > 0 { + assert_eq!(int_count, data.variants.len(), "either none or all variants can have a descriminant"); + + let parts = data.variants.iter().map(|var| { + if let Some((_, Expr::Lit(ref lit_expr))) = var.discriminant { + let var_ident = &var.ident; + let pat = Pat::from(lit_expr.clone()); + quote! { + #pat => Ok(#id::#var_ident) + } + } else { + panic!() + } + }); + + quote! 
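+        // Generated impl: map a Primitive::Integer to the variant with the matching discriminant.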
{ + impl #impl_generics pdf::object::Object for #id #ty_generics #where_clause { + fn from_primitive(p: pdf::primitive::Primitive, _resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + match p { + pdf::primitive::Primitive::Integer(i) => { + match i { + #( #parts, )* + _ => Err(pdf::error::PdfError::UnknownVariant { id: stringify!(#id), name: i.to_string() }) + } + } + _ => Err(pdf::error::PdfError::UnexpectedPrimitive { expected: "Integer", found: p.get_debug_name() }), + } + } + } + } + } else { + let (pairs, other) = enum_pairs(ast, data); + + let mut parts: Vec<_> = pairs + .iter() + .map(|(name, var)| { + quote! { + #name => Ok(#var) + } + }) + .collect(); + + if let Some(other_tokens) = other { + parts.push(quote! { + s => Ok(#other_tokens(s.to_string())) + }); + } else { + parts.push(quote! { + s => Err(pdf::error::PdfError::UnknownVariant { id: stringify!(#id), name: s.to_string() }) + }); + } + + quote! { + impl #impl_generics pdf::object::Object for #id #ty_generics #where_clause { + fn from_primitive(p: pdf::primitive::Primitive, _resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + match p { + pdf::primitive::Primitive::Name(name) => { + match name.as_str() { + #( #parts, )* + } + } + _ => Err(pdf::error::PdfError::UnexpectedPrimitive { expected: "Name", found: p.get_debug_name() }), + } + } + } + } + } +} +/// Accepts Name to construct enum +fn impl_objectwrite_for_enum(ast: &DeriveInput, data: &DataEnum) -> SynStream { + let id = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let int_count = data.variants.iter().filter(|var| var.discriminant.is_some()).count(); + if int_count > 0 { + assert_eq!(int_count, data.variants.len(), "either none or all variants can have a descriminant"); + + let parts = data.variants.iter().map(|var| { + if let Some((_, ref expr)) = var.discriminant { + let var_ident = &var.ident; + quote! { + #id::#var_ident => Ok(Primitive::Integer(#expr)) + } + } else { + panic!() + } + }); + + quote! { + impl #impl_generics pdf::object::ObjectWrite for #id #ty_generics #where_clause { + fn to_primitive(&self, update: &mut impl pdf::object::Updater) -> Result { + match *self { + #( #parts, )* + } + } + } + } + } else { + let (pairs, other) = enum_pairs(ast, data); + + let mut ser_code: Vec<_> = pairs + .iter() + .map(|(name, var)| { + quote! { + #var => #name + } + }) + .collect(); + + if let Some(other_tokens) = other { + ser_code.push(quote! { + #other_tokens(ref name) => name.as_str() + }); + } + + quote! { + impl #impl_generics pdf::object::ObjectWrite for #id #ty_generics #where_clause { + fn to_primitive(&self, update: &mut impl pdf::object::Updater) -> Result { + let name = match *self { + #( #ser_code, )* + }; + + Ok(Primitive::Name(name.into())) + } + } + } + } +} +fn impl_deepclone_for_enum(ast: &DeriveInput, data: &DataEnum) -> SynStream { + let id = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let parts = data.variants.iter().map(|var| { + let var_ident = &var.ident; + match var.fields { + Fields::Unnamed(ref fields) => { + let labels: Vec = fields.unnamed.iter().enumerate().map(|(i, f)| { + Ident::new(&format!("f_{i}"), Span::mixed_site()) + }).collect(); + quote! { + #id::#var_ident( #( ref #labels, )* ) => Ok(#id::#var_ident( #( #labels.deep_clone(cloner)? ),* )) + } + } + Fields::Named(ref fields) => { + let names: Vec<_> = fields.named.iter().map(|f| f.ident.as_ref().unwrap()).collect(); + quote! 
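+                // Generated arm: destructure the named fields and deep_clone each of them.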
{ + #id::#var_ident { #( ref #names ),* } => Ok(#id::#var_ident { #( #names: #names.deep_clone(cloner)? ),* }) + } + } + Fields::Unit => { + quote! { + #id::#var_ident => Ok(#id::#var_ident) + } + } + } + }); + + quote! { + impl #impl_generics pdf::object::DeepClone for #id #ty_generics #where_clause { + fn deep_clone(&self, cloner: &mut impl pdf::object::Cloner) -> Result { + match *self { + #( #parts, )* + } + } + } + } +} + +fn impl_enum_from_stream(ast: &DeriveInput, data: &DataEnum, attrs: &GlobalAttrs) -> SynStream { + let id = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let ty_check = match (&attrs.type_name, attrs.type_required) { + (Some(ref ty), required) => quote! { + stream.info.expect(stringify!(#id), "Type", #ty, #required)?; + }, + (None, _) => quote!{} + }; + + let variants_code: Vec<_> = data.variants.iter().map(|var| { + let attrs = FieldAttrs::parse(&var.attrs); + let inner_ty = match var.fields { + Fields::Unnamed(ref fields) => { + assert_eq!(fields.unnamed.len(), 1, "all variants in a stream enum have to have exactly one unnamed field"); + fields.unnamed.first().unwrap().ty.clone() + }, + _ => panic!("all variants in a stream enum have to have exactly one unnamed field") + }; + let name = attrs.name.map(|lit| lit.value()).unwrap_or_else(|| var.ident.to_string()); + let variant_ident = &var.ident; + quote! { + #name => Ok(#id::#variant_ident ( #inner_ty::from_primitive(pdf::primitive::Primitive::Stream(stream), resolve)?)) + } + }).collect(); + + quote! { + impl #impl_generics pdf::object::Object for #id #ty_generics #where_clause { + fn from_primitive(p: pdf::primitive::Primitive, resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + let mut stream = PdfStream::from_primitive(p, resolve)?; + #ty_check + + let subty = stream.info.get("Subtype") + .ok_or(pdf::error::PdfError::MissingEntry { typ: stringify!(#id), field: "Subtype".into()})? + .as_name()?; + + match subty { + #( #variants_code, )* + s => Err(pdf::error::PdfError::UnknownVariant { id: stringify!(#id), name: s.into() }) + } + } + } + } +} + + + + +fn is_option(f: &Field) -> Option { + match f.ty { + Type::Path(ref p) => { + let first = p.path.segments.first().unwrap(); + match first { + PathSegment { ident, arguments: PathArguments::AngleBracketed(args) } if ident == "Option" => { + match args.args.first().unwrap() { + GenericArgument::Type(t) => Some(t.clone()), + _ => panic!() + } + } + _ => None + } + } + _ => None + } +} + +/// Accepts Dictionary to construct a struct +fn impl_object_for_struct(ast: &DeriveInput, fields: &Fields) -> SynStream { + let id = &ast.ident; + let mut generics = ast.generics.clone(); + for g in generics.params.iter_mut() { + if let GenericParam::Type(p) = g { + p.bounds.push( + parse_quote!(pdf::object::Object) + ); + } + } + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let attrs = GlobalAttrs::from_ast(ast); + + /////////////////////// + let typ = id.to_string(); + let let_parts = fields.iter().map(|field| { + + let name = &field.ident; + let attrs = FieldAttrs::parse(&field.attrs); + if attrs.skip { + return quote! {} + } + if attrs.other { + return quote! { + let #name = dict; + }; + } + + let key = attrs.key(); + + let ty = field.ty.clone(); + if let Some(ref default) = attrs.default() { + quote! 
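+            // Generated code: fall back to the `default` expression when the key is missing from the dictionary.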
{ + let #name = { + let primitive: Option + = dict.remove(#key); + let x: #ty = match primitive { + Some(primitive) => <#ty as pdf::object::Object>::from_primitive(primitive, resolve).map_err(|e| + pdf::error::PdfError::FromPrimitive { + typ: #typ, + field: stringify!(#name), + source: Box::new(e) + })?, + None => #default, + }; + x + }; + } + } else { + quote! { + let #name = { + match dict.remove(#key) { + Some(primitive) => + match <#ty as pdf::object::Object>::from_primitive(primitive, resolve) { + Ok(obj) => obj, + Err(e) => return Err(pdf::error::PdfError::FromPrimitive { + typ: stringify!(#ty), + field: stringify!(#name), + source: Box::new(e) + }) + } + None => // Try to construct T from Primitive::Null + match <#ty as pdf::object::Object>::from_primitive(pdf::primitive::Primitive::Null, resolve) { + Ok(obj) => obj, + Err(_) => return Err(pdf::error::PdfError::MissingEntry { + typ: #typ, + field: String::from(stringify!(#name)), + }) + }, + } + // ^ By using Primitive::Null when we don't find the key, we allow 'optional' + // types like Option and Vec to be constructed from non-existing values + }; + } + } + }); + + let field_parts = fields.iter().map(|field| { + let name = &field.ident; + quote! { #name: #name, } + }); + + let checks: Vec<_> = attrs.checks.iter().map(|(key, val)| + quote! { + dict.expect(#typ, #key, #val, true)?; + } + ).collect(); + + let ty_check = match (&attrs.type_name, attrs.type_required) { + (Some(ref ty), required) => quote! { + dict.expect(#typ, "Type", #ty, #required)?; + }, + (None, _) => quote!{} + }; + + quote! { + impl #impl_generics pdf::object::FromDict for #id #ty_generics #where_clause { + fn from_dict(mut dict: pdf::primitive::Dictionary, resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + #ty_check + #( #checks )* + #( #let_parts )* + Ok(#id { + #( #field_parts )* + }) + } + } + impl #impl_generics pdf::object::Object for #id #ty_generics #where_clause { + fn from_primitive(p: pdf::primitive::Primitive, resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + let dict = pdf::primitive::Dictionary::from_primitive(p, resolve)?; + ::from_dict(dict, resolve) + } + } + } +} + +fn impl_objectwrite_for_struct(ast: &DeriveInput, fields: &Fields) -> SynStream { + let id = &ast.ident; + let mut generics = ast.generics.clone(); + for g in generics.params.iter_mut() { + if let GenericParam::Type(p) = g { + p.bounds.push( + parse_quote!(pdf::object::ObjectWrite) + ); + } + } + let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); + let attrs = GlobalAttrs::from_ast(ast); + + let parts: Vec<_> = fields.iter() + .map(|field| { + (field.ident.clone(), FieldAttrs::parse(&field.attrs), is_option(field)) + }).collect(); + + let fields_ser = parts.iter() + .map( |(field, attrs, _opt)| + if attrs.skip | attrs.other { + quote!() + } else { + let key = attrs.key(); + let tr = if attrs.indirect { + quote! { + match val { + pdf::primitive::Primitive::Reference(r) => val, + p => updater.create(p)?.into(), + } + } + } else { + quote! { val } + }; + + quote! { + let val = pdf::object::ObjectWrite::to_primitive(&self.#field, updater)?; + if !matches!(val, pdf::primitive::Primitive::Null) { + let val2 = #tr; + dict.insert(#key, val2); + } + } + } + ); + let checks_code = attrs.checks.iter().map(|(key, val)| + quote! { + dict.insert(#key, pdf::primitive::Primitive::Name(#val.into())); + } + ); + let pdf_type = match attrs.type_name { + Some(ref name) => quote! 
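+        // Generated code: insert the /Type entry whenever a type name is declared on the struct.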
{ + dict.insert("Type", pdf::primitive::Primitive::Name(#name.into())); + }, + None => quote! {} + }; + + let other = parts.iter().filter(|(field, attrs, _)| attrs.other).flat_map(|(field, _, _)| field).next(); + let init_dict = if let Some(other) = other { + quote! { + let mut dict = self.#other.clone(); + } + } else { + quote! { + let mut dict = pdf::primitive::Dictionary::new(); + } + }; + + quote! { + impl #impl_generics pdf::object::ObjectWrite for #id #ty_generics #where_clause { + fn to_primitive(&self, update: &mut impl pdf::object::Updater) -> Result { + pdf::object::ToDict::to_dict(self, update).map(pdf::primitive::Primitive::Dictionary) + } + } + impl #impl_generics pdf::object::ToDict for #id #ty_generics #where_clause { + fn to_dict(&self, updater: &mut impl pdf::object::Updater) -> Result { + #init_dict + #pdf_type + #( #checks_code )* + #(#fields_ser)* + Ok(dict) + } + } + } +} +fn impl_deepclone_for_struct(ast: &DeriveInput, fields: &Fields) -> SynStream { + let id = &ast.ident; + let mut generics = ast.generics.clone(); + for g in generics.params.iter_mut() { + if let GenericParam::Type(p) = g { + p.bounds.push( + parse_quote!(pdf::object::DeepClone) + ); + } + } + let (impl_generics, mut ty_generics, where_clause) = generics.split_for_impl(); + + let parts: Vec<_> = fields.iter() + .map(|field| { + (field.ident.clone(), is_option(field)) + }).collect(); + + let field_parts = parts.iter() + .map( |(field, _opt)| + { + quote! { + #field: self.#field.deep_clone(cloner)?, + } + } + ); + + quote! { + impl #impl_generics pdf::object::DeepClone for #id #ty_generics #where_clause { + fn deep_clone(&self, cloner: &mut impl pdf::object::Cloner) -> Result { + Ok(#id { + #( #field_parts )* + }) + } + } + } +} + +/// Note: must have info and dict (TODO explain in docs) +fn impl_object_for_stream(ast: &DeriveInput, fields: &Fields) -> SynStream { + let id = &ast.ident; + let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); + + let info_ty = fields.iter() + .filter_map(|field| { + if let Some(ident) = field.ident.as_ref() { + if ident == "info" { + Some(field.ty.clone()) + } else { + None + } + } else { + None + } + }).next().unwrap(); + + quote! { + impl #impl_generics pdf::object::Object for #id #ty_generics #where_clause { + fn from_primitive(p: pdf::primitive::Primitive, resolve: &impl pdf::object::Resolve) -> pdf::error::Result { + let pdf::primitive::PdfStream {info, data} + = p.to_stream(resolve)?; + + Ok(#id { + info: <#info_ty as pdf::object::Object>::from_primitive(pdf::primitive::Primitive::Dictionary (info), resolve)?, + data: data, + }) + } + } + } +} diff --git a/src-tauri/.gitignore b/src-tauri/.gitignore new file mode 100644 index 0000000..b21bd68 --- /dev/null +++ b/src-tauri/.gitignore @@ -0,0 +1,7 @@ +# Generated by Cargo +# will have compiled files and executables +/target/ + +# Generated by Tauri +# will have schema files for capabilities auto-completion +/gen/schemas diff --git a/src-tauri/Cargo.lock b/src-tauri/Cargo.lock new file mode 100644 index 0000000..7a24072 --- /dev/null +++ b/src-tauri/Cargo.lock @@ -0,0 +1,5691 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" + +[[package]] +name = "adler32" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "ahash" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" +dependencies = [ + "cfg-if", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anyhow" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" + +[[package]] +name = "ashpd" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9c39d707614dbcc6bed00015539f488d8e3fe3e66ed60961efc0c90f4b380b3" +dependencies = [ + "enumflags2", + "futures-channel", + "futures-util", + "rand 0.8.5", + "raw-window-handle", + "serde", + "serde_repr", + "tokio", + "url", + "wayland-backend", + "wayland-client", + "wayland-protocols", + "zbus", +] + +[[package]] +name = "async-broadcast" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "435a87a52755b8f27fcf321ac4f04b2802e337c8c4872923137471ec39c37532" +dependencies = [ + "event-listener", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-channel" +version = "2.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-executor" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +dependencies = [ + "async-task", + "concurrent-queue", + "fastrand", + "futures-lite", + "slab", +] + +[[package]] +name = "async-fs" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a" +dependencies = [ + "async-lock", + "blocking", + "futures-lite", +] + +[[package]] +name = "async-io" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +dependencies = [ + "async-lock", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-lock" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-process" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb" +dependencies = [ + "async-channel", + "async-io", + "async-lock", + "async-signal", + "async-task", + "blocking", + "cfg-if", + "event-listener", + "futures-lite", + "rustix", + "tracing", +] + +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "async-signal" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" +dependencies = [ + "async-io", + "async-lock", + "atomic-waker", + "cfg-if", + "futures-core", + "futures-io", + "rustix", + "signal-hook-registry", + "slab", + "windows-sys 0.59.0", +] + +[[package]] +name = "async-task" +version = "4.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" + +[[package]] +name = "async-trait" +version = "0.1.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "atk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241b621213072e993be4f6f3a9e4b45f65b7e6faad43001be957184b7bb1824b" +dependencies = [ + "atk-sys", + "glib", + "libc", +] + +[[package]] +name = "atk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e48b684b0ca77d2bbadeef17424c2ea3c897d44d566a1617e7e8f30614d086" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + 
+[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "backtrace" +version = "0.3.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f68f53c83ab957f72c32642f3868eec03eb974d1fb82e453128456482613d36" +dependencies = [ + "serde", +] + +[[package]] +name = "block" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d8c1fef690941d3e7788d328517591fecc684c084084702d6ff1641e993699a" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block2" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c132eebf10f5cad5289222520a4a058514204aed6d791f1cf4fe8088b82d15f" +dependencies = [ + "objc2", +] + +[[package]] +name = "blocking" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +dependencies = [ + "async-channel", + "async-task", + "futures-io", + "futures-lite", + "piper", +] + +[[package]] +name = "brotli" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "4.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" + +[[package]] +name = "bytemuck" +version = 
"1.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +dependencies = [ + "serde", +] + +[[package]] +name = "cairo-rs" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2" +dependencies = [ + "bitflags 2.8.0", + "cairo-sys-rs", + "glib", + "libc", + "once_cell", + "thiserror 1.0.69", +] + +[[package]] +name = "cairo-sys-rs" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "685c9fa8e590b8b3d678873528d83411db17242a73fccaed827770ea0fedda51" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "camino" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo-platform" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" +dependencies = [ + "serde", +] + +[[package]] +name = "cargo_metadata" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8769706aad5d996120af43197bf46ef6ad0fda35216b4505f926a365a232d924" +dependencies = [ + "camino", + "cargo-platform", + "semver", + "serde", + "serde_json", + "thiserror 2.0.11", +] + +[[package]] +name = "cargo_toml" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fbd1fe9db3ebf71b89060adaf7b0504c2d6a425cf061313099547e382c2e472" +dependencies = [ + "serde", + "toml 0.8.19", +] + +[[package]] +name = "cbc" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" +dependencies = [ + "cipher", +] + +[[package]] +name = "cc" +version = "1.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13208fcbb66eaeffe09b99fffbe1af420f00a7b35aa99ad683dfc1aa76145229" +dependencies = [ + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cfb" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38f2da7a0a2c4ccf0065be06397cc26a81f4e528be095826eee9d4adbb8c60f" +dependencies = [ + "byteorder", + "fnv", + "uuid", +] + +[[package]] +name = "cfg-expr" +version = "0.15.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d067ad48b8650848b989a59a86c6c36a995d02d2bf778d45c3c5d57bc2718f02" +dependencies = [ + "smallvec", + "target-lexicon", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] 
+name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "num-traits", + "serde", + "windows-targets 0.52.6", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "cocoa" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f79398230a6e2c08f5c9760610eb6924b52aa9e7950a619602baba59dcbbdbb2" +dependencies = [ + "bitflags 2.8.0", + "block", + "cocoa-foundation", + "core-foundation", + "core-graphics", + "foreign-types", + "libc", + "objc", +] + +[[package]] +name = "cocoa-foundation" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14045fb83be07b5acf1c0884b2180461635b433455fa35d1cd6f17f1450679d" +dependencies = [ + "bitflags 2.8.0", + "block", + "core-foundation", + "core-graphics-types", + "libc", + "objc", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core-graphics" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" +dependencies = [ + "bitflags 2.8.0", + "core-foundation", + "core-graphics-types", + "foreign-types", + "libc", +] + +[[package]] +name = "core-graphics-types" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" +dependencies = [ + "bitflags 2.8.0", + "core-foundation", + "libc", +] + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "cssparser" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "754b69d351cdc2d8ee09ae203db831e005560fc6030da058f86ad60c92a9cb0a" +dependencies = [ + "cssparser-macros", + "dtoa-short", + "itoa 0.4.8", + "matches", + "phf 0.8.0", + "proc-macro2", + "quote", + "smallvec", + "syn 1.0.109", +] + +[[package]] +name = "cssparser-macros" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" +dependencies = [ + "quote", + "syn 2.0.96", +] + +[[package]] +name = "ctor" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" +dependencies = [ + "quote", + "syn 2.0.96", +] + +[[package]] +name = "darling" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.96", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "dary_heap" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04d2cd9c18b9f454ed67da600630b021a8a80bf33f8c95896ab33aaf1c26b728" + +[[package]] +name = "datasize" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e65c07d59e45d77a8bda53458c24a828893a99ac6cdd9c84111e09176ab739a2" +dependencies = [ + "datasize_derive", +] + +[[package]] +name = "datasize_derive" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"613e4ee15899913285b7612004bbd490abd605be7b11d35afada5902fb6b91d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "deflate" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86f7e25f518f4b81808a2cf1c50996a61f5c2eb394b2393bd87f2a4780a432f" +dependencies = [ + "adler32", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +dependencies = [ + "powerfmt", + "serde", +] + +[[package]] +name = "derive_more" +version = "0.99.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.96", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "dirs" +version = "5.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-sys" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" +dependencies = [ + "libc", + "option-ext", + "redox_users", + "windows-sys 0.48.0", +] + +[[package]] +name = "dispatch" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "dlib" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "330c60081dcc4c72131f8eb70510f1ac07223e5d4163db481a04a0befcffa412" +dependencies = [ + "libloading", +] + +[[package]] +name = "dlopen2" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1297103d2bbaea85724fcee6294c2d50b1081f9ad47d0f6f6f61eda65315a6" +dependencies = [ + "dlopen2_derive", + "libc", + "once_cell", + "winapi", +] + +[[package]] +name = "dlopen2_derive" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b99bf03862d7f545ebc28ddd33a665b50865f4dfd84031a393823879bd4c54" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "downcast-rs" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" + +[[package]] +name = "dpi" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f25c0e292a7ca6d6498557ff1df68f32c99850012b6ea401cf8daf771f22ff53" +dependencies = [ + "serde", +] + +[[package]] +name = "dtoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" + 
+[[package]] +name = "dtoa-short" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87" +dependencies = [ + "dtoa", +] + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" + +[[package]] +name = "either" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" + +[[package]] +name = "embed-resource" +version = "2.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b68b6f9f63a0b6a38bc447d4ce84e2b388f3ec95c99c641c8ff0dd3ef89a6379" +dependencies = [ + "cc", + "memchr", + "rustc_version", + "toml 0.8.19", + "vswhom", + "winreg", +] + +[[package]] +name = "embed_plist" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" + +[[package]] +name = "endi" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3d8a32ae18130a3c84dd492d4215c3d913c3b07c6b63c2eb3eb7ff1101ab7bf" + +[[package]] +name = "enumflags2" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba2f4b465f5318854c6f8dd686ede6c0a9dc67d4b1ac241cf0eb51521a309147" +dependencies = [ + "enumflags2_derive", + "serde", +] + +[[package]] +name = "enumflags2_derive" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc4caf64a58d7a6d65ab00639b046ff54399a39f5f2554728895ace4b297cd79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "erased-serde" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d" +dependencies = [ + "serde", + "typeid", +] + +[[package]] +name = "errno" +version = "0.3.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "event-listener" +version = "5.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3492acde4c3fc54c845eaab3eed8bd00c7a7d881f78bfc801e43a93dec1331ae" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fax" +version = "0.2.4" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b03e33ad0e71af414ef9d2b0a94d23ff59115bb068e6a6a06c0952f2c22ffd77" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c1d7ffc9f2dc8316348c75281a99c8fdc60c1ddf4f82a366d117bf1b74d5a39" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "fdeflate" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e6853b52649d4ac5c0bd02320cddc5ba956bdb407c4b75a2c6b75bf51500f8c" +dependencies = [ + "simd-adler32", +] + +[[package]] +name = "field-offset" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38e2275cc4e4fc009b0669731a1e5ab7ebf11f469eaede2bab9309a5b4d6057f" +dependencies = [ + "memoffset", + "rustc_version", +] + +[[package]] +name = "flate2" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" +dependencies = [ + "foreign-types-macros", + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-macros" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "foreign-types-shared" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futf" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" +dependencies = [ + "mac", + "new_debug_unreachable", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5edaec856126859abb19ed65f39e90fea3a9574b9707f13539acf4abf7eb532" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "gdk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f245958c627ac99d8e529166f9823fb3b838d1d41fd2b297af3075093c2691" +dependencies = [ + "cairo-rs", + "gdk-pixbuf", + "gdk-sys", + "gio", + "glib", + "libc", + "pango", +] + +[[package]] +name = "gdk-pixbuf" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50e1f5f1b0bfb830d6ccc8066d18db35c487b1b2b1e8589b5dfe9f07e8defaec" +dependencies = [ + "gdk-pixbuf-sys", + "gio", + "glib", + "libc", + "once_cell", +] + +[[package]] +name = "gdk-pixbuf-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9839ea644ed9c97a34d129ad56d38a25e6756f99f3a88e15cd39c20629caf7" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gdk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c2d13f38594ac1e66619e188c6d5a1adb98d11b2fcf7894fc416ad76aa2f3f7" +dependencies = [ + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkwayland-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "140071d506d223f7572b9f09b5e155afbd77428cd5cc7af8f2694c41d98dfe69" +dependencies = [ + "gdk-sys", + "glib-sys", + "gobject-sys", + "libc", + "pkg-config", + "system-deps", +] + +[[package]] +name = "gdkx11" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3caa00e14351bebbc8183b3c36690327eb77c49abc2268dd4bd36b856db3fbfe" +dependencies = [ + "gdk", + "gdkx11-sys", + "gio", + "glib", + "libc", + "x11", +] + +[[package]] +name = "gdkx11-sys" +version = "0.18.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e7445fe01ac26f11601db260dd8608fe172514eb63b3b5e261ea6b0f4428d" +dependencies = [ + "gdk-sys", + "glib-sys", + "libc", + "system-deps", + "x11", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + +[[package]] +name = "getrandom" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +dependencies = [ + "cfg-if", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "gio" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fc8f532f87b79cbc51a79748f16a6828fb784be93145a322fa14d06d354c73" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-util", + "gio-sys", + "glib", + "libc", + "once_cell", + "pin-project-lite", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "gio-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37566df850baf5e4cb0dfb78af2e4b9898d817ed9263d1090a2df958c64737d2" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", + "winapi", +] + +[[package]] +name = "glib" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233daaf6e83ae6a12a52055f568f9d7cf4671dabb78ff9560ab6da230ce00ee5" +dependencies = [ + "bitflags 2.8.0", + "futures-channel", + "futures-core", + "futures-executor", + "futures-task", + "futures-util", + "gio-sys", + "glib-macros", + "glib-sys", + "gobject-sys", + "libc", + "memchr", + "once_cell", + "smallvec", + "thiserror 1.0.69", +] + +[[package]] +name = "glib-macros" +version = "0.18.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" +dependencies = [ + "heck 0.4.1", + "proc-macro-crate 2.0.0", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "glib-sys" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063ce2eb6a8d0ea93d2bf8ba1957e78dbab6be1c2220dd3daca57d5a9d869898" +dependencies = [ + "libc", + "system-deps", +] + +[[package]] +name = "glob" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" + +[[package]] +name = "globalcache" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240a3059d86f2ba6859ac79f95ff94e65606abc775c1bc0ecf9b6590fb35dc04" +dependencies = [ + "async-trait", + "tuple", + "web-time", +] + +[[package]] +name = "gobject-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "0850127b514d1c4a4654ead6dedadb18198999985908e6ffe4436f53c785ce44" +dependencies = [ + "glib-sys", + "libc", + "system-deps", +] + +[[package]] +name = "gtk" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd56fb197bfc42bd5d2751f4f017d44ff59fbb58140c6b49f9b3b2bdab08506a" +dependencies = [ + "atk", + "cairo-rs", + "field-offset", + "futures-channel", + "gdk", + "gdk-pixbuf", + "gio", + "glib", + "gtk-sys", + "gtk3-macros", + "libc", + "pango", + "pkg-config", +] + +[[package]] +name = "gtk-sys" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f29a1c21c59553eb7dd40e918be54dccd60c52b049b75119d5d96ce6b624414" +dependencies = [ + "atk-sys", + "cairo-sys-rs", + "gdk-pixbuf-sys", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "pango-sys", + "system-deps", +] + +[[package]] +name = "gtk3-macros" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ff3c5b21f14f0736fed6dcfc0bfb4225ebf5725f3c0209edeec181e4d73e9d" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", + "allocator-api2", +] + +[[package]] +name = "hashbrown" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "html5ever" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bea68cab48b8459f17cf1c944c67ddc572d272d9f2b274140f223ecb1da4a3b7" +dependencies = [ + "log", + "mac", + "markup5ever", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "http" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.14", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" + +[[package]] +name = "hyper" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "httparse", + "itoa 1.0.14", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.61" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "wasm-bindgen", + "windows-core 0.52.0", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "ico" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3804960be0bb5e4edb1e1ad67afd321a9ecfd875c3e65c099468fd2717d7cae" +dependencies = [ + "byteorder", + "png", +] + +[[package]] +name = "icu_collections" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locid" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_locid_transform" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_locid_transform_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_locid_transform_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + +[[package]] +name = "icu_normalizer" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "utf8_iter", + "write16", + "zerovec", +] + 
+[[package]] +name = "icu_normalizer_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + +[[package]] +name = "icu_properties" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locid_transform", + "icu_properties_data", + "icu_provider", + "tinystr", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + +[[package]] +name = "icu_provider" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +dependencies = [ + "displaydoc", + "icu_locid", + "icu_provider_macros", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_provider_macros" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c9c992b02b5b4c94ea26e32fe5bccb7aa7d9f390ab5c1221ff895bc7ea8b652" +dependencies = [ + "equivalent", + "hashbrown 0.15.2", + "serde", +] + +[[package]] +name = "infer" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc150e5ce2330295b8616ce0e3f53250e53af31759a9dbedad1621ba29151847" +dependencies = [ + "cfb", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "is-docker" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "928bae27f42bc99b60d9ac7334e3a21d10ad8f1835a4e12ec3ec0464765ed1b3" 
+dependencies = [ + "once_cell", +] + +[[package]] +name = "is-wsl" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "173609498df190136aa7dea1a91db051746d339e18476eed5ca40521f02d7aa5" +dependencies = [ + "is-docker", + "once_cell", +] + +[[package]] +name = "istring" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "875cc6fb9aecbc1a9bd736f2d18b12e0756b4c80c5e35e28262154abcb077a39" +dependencies = [ + "datasize", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" + +[[package]] +name = "javascriptcore-rs" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca5671e9ffce8ffba57afc24070e906da7fc4b1ba66f2cabebf61bf2ea257fcc" +dependencies = [ + "bitflags 1.3.2", + "glib", + "javascriptcore-rs-sys", +] + +[[package]] +name = "javascriptcore-rs-sys" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1be78d14ffa4b75b66df31840478fef72b51f8c2465d4ca7c194da9f7a5124" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jpeg-decoder" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5d4a7da358eff58addd2877a45865158f0d78c911d43a5784ceb7bbf52833b0" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "json-patch" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "863726d7afb6bc2590eeff7135d923545e5e964f004c2ccf8716c25e70a86f08" +dependencies = [ + "jsonptr", + "serde", + "serde_json", + "thiserror 1.0.69", +] + +[[package]] +name = "jsonptr" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dea2b27dd239b2556ed7a25ba842fe47fd602e7fc7433c2a8d6106d4d9edd70" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "keyboard-types" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b750dcadc39a09dbadd74e118f6dd6598df77fa01df0cfcdc52c28dece74528a" +dependencies = [ + "bitflags 2.8.0", + "serde", + "unicode-segmentation", +] + +[[package]] +name = "kuchikiki" +version = 
"0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e4755b7b995046f510a7520c42b2fed58b77bd94d5a87a8eb43d2fd126da8" +dependencies = [ + "cssparser", + "html5ever", + "indexmap 1.9.3", + "matches", + "selectors", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libappindicator" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03589b9607c868cc7ae54c0b2a22c8dc03dd41692d48f2d7df73615c6a95dc0a" +dependencies = [ + "glib", + "gtk", + "gtk-sys", + "libappindicator-sys", + "log", +] + +[[package]] +name = "libappindicator-sys" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e9ec52138abedcc58dc17a7c6c0c00a2bdb4f3427c7f63fa97fd0d859155caf" +dependencies = [ + "gtk-sys", + "libloading", + "once_cell", +] + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "libflate" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45d9dfdc14ea4ef0900c1cddbc8dcd553fbaacd8a4a282cf4018ae9dd04fb21e" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e0d73b369f386f1c44abd9c570d5318f55ccde816ff4b562fa452e5182863d" +dependencies = [ + "core2", + "hashbrown 0.14.5", + "rle-decode-fast", +] + +[[package]] +name = "libloading" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" +dependencies = [ + "cfg-if", + "winapi", +] + +[[package]] +name = "libredox" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +dependencies = [ + "bitflags 2.8.0", + "libc", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "litemap" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" + +[[package]] +name = "lock_api" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" + +[[package]] +name = "mac" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" + +[[package]] +name = "malloc_buf" +version = "0.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb907fe88d54d8d9ce32a3cceab4218ed2f6b7d35617cafe9adf84e43919cb" +dependencies = [ + "libc", +] + 
+[[package]] +name = "markup5ever" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2629bb1404f3d34c2e921f21fd34ba00b206124c81f65c50b43b6aaefeb016" +dependencies = [ + "log", + "phf 0.10.1", + "phf_codegen 0.10.0", + "string_cache", + "string_cache_codegen", + "tendril", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "md5" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" + +[[package]] +name = "memchr" +version = "2.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8402cab7aefae129c6977bb0ff1b8fd9a04eb5b51efc50a70bea51cda0c7924" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = "muda" +version = "0.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdae9c00e61cc0579bcac625e8ad22104c60548a025bfc972dc83868a28e1484" +dependencies = [ + "crossbeam-channel", + "dpi", + "gtk", + "keyboard-types", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "once_cell", + "png", + "serde", + "thiserror 1.0.69", + "windows-sys 0.59.0", +] + +[[package]] +name = "ndk" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" +dependencies = [ + "bitflags 2.8.0", + "jni-sys", + "log", + "ndk-sys", + "num_enum", + "raw-window-handle", + "thiserror 1.0.69", +] + +[[package]] +name = "ndk-context" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" + +[[package]] +name = "ndk-sys" +version = "0.6.0+11769913" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873" +dependencies = [ + "jni-sys", +] + +[[package]] +name = "new_debug_unreachable" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" + +[[package]] +name = "nix" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" +dependencies = [ + "bitflags 2.8.0", + "cfg-if", + "cfg_aliases", + "libc", + 
"memoffset", +] + +[[package]] +name = "nodrop" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" + +[[package]] +name = "num-conv" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_enum" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" +dependencies = [ + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "objc" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "915b1b472bc21c53464d6c8461c9d3af805ba1ef837e1cac254428f4a77177b1" +dependencies = [ + "malloc_buf", +] + +[[package]] +name = "objc-sys" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb91bdd390c7ce1a8607f35f3ca7151b65afc0ff5ff3b34fa350f7d7c7e4310" +dependencies = [ + "cc", +] + +[[package]] +name = "objc2" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46a785d4eeff09c14c487497c162e92766fbb3e4059a71840cecc03d9a50b804" +dependencies = [ + "objc-sys", + "objc2-encode", +] + +[[package]] +name = "objc2-app-kit" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4e89ad9e3d7d297152b17d39ed92cd50ca8063a89a9fa569046d41568891eff" +dependencies = [ + "bitflags 2.8.0", + "block2", + "libc", + "objc2", + "objc2-core-data", + "objc2-core-image", + "objc2-foundation", + "objc2-quartz-core", +] + +[[package]] +name = "objc2-cloud-kit" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74dd3b56391c7a0596a295029734d3c1c5e7e510a4cb30245f8221ccea96b009" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-core-location", + "objc2-foundation", +] + +[[package]] +name = "objc2-contacts" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5ff520e9c33812fd374d8deecef01d4a840e7b41862d849513de77e44aa4889" +dependencies = [ + "block2", + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-core-data" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "617fbf49e071c178c0b24c080767db52958f716d9eabdf0890523aeae54773ef" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-core-image" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55260963a527c99f1819c4f8e3b47fe04f9650694ef348ffd2227e8196d34c80" +dependencies = [ + "block2", + "objc2", + "objc2-foundation", + "objc2-metal", +] + +[[package]] +name = "objc2-core-location" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "000cfee34e683244f284252ee206a27953279d370e309649dc3ee317b37e5781" +dependencies = [ + "block2", + "objc2", + "objc2-contacts", + "objc2-foundation", +] + +[[package]] +name = "objc2-encode" +version = "4.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7891e71393cd1f227313c9379a26a584ff3d7e6e7159e988851f0934c993f0f8" + +[[package]] +name = "objc2-foundation" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8" +dependencies = [ + "bitflags 2.8.0", + "block2", + "dispatch", + "libc", + "objc2", +] + +[[package]] +name = "objc2-link-presentation" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a1ae721c5e35be65f01a03b6d2ac13a54cb4fa70d8a5da293d7b0020261398" +dependencies = [ + "block2", + "objc2", + "objc2-app-kit", + "objc2-foundation", +] + +[[package]] +name = "objc2-metal" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-quartz-core" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-foundation", + "objc2-metal", +] + +[[package]] +name = "objc2-symbols" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a684efe3dec1b305badae1a28f6555f6ddd3bb2c2267896782858d5a78404dc" +dependencies = [ + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-ui-kit" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8bb46798b20cd6b91cbd113524c490f1686f4c4e8f49502431415f3512e2b6f" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-cloud-kit", + "objc2-core-data", + "objc2-core-image", + "objc2-core-location", + "objc2-foundation", + "objc2-link-presentation", + "objc2-quartz-core", + "objc2-symbols", + "objc2-uniform-type-identifiers", + "objc2-user-notifications", +] + +[[package]] +name = "objc2-uniform-type-identifiers" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44fa5f9748dbfe1ca6c0b79ad20725a11eca7c2218bceb4b005cb1be26273bfe" +dependencies = [ + "block2", + "objc2", + "objc2-foundation", +] + +[[package]] +name = "objc2-user-notifications" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76cfcbf642358e8689af64cee815d139339f3ed8ad05103ed5eaf73db8d84cb3" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-core-location", + "objc2-foundation", +] + +[[package]] +name = "objc2-web-kit" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68bc69301064cebefc6c4c90ce9cba69225239e4b8ff99d445a2b5563797da65" +dependencies = [ + "bitflags 2.8.0", + "block2", + "objc2", + "objc2-app-kit", + "objc2-foundation", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "open" +version = "5.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2483562e62ea94312f3576a7aca397306df7990b8d89033e18766744377ef95" +dependencies = [ + "dunce", + "is-wsl", + "libc", + "pathdiff", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-stream" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "pango" +version = "0.18.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ca27ec1eb0457ab26f3036ea52229edbdb74dee1edd29063f5b9b010e7ebee4" +dependencies = [ + "gio", + "glib", + "libc", + "once_cell", + "pango-sys", +] + +[[package]] +name = "pango-sys" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436737e391a843e5933d6d9aa102cb126d501e815b83601365a948a518555dc5" +dependencies = [ + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + +[[package]] +name = "pdf" +version = "0.9.1" +dependencies = [ + "aes", + "bitflags 2.8.0", + "cbc", + "datasize", + "deflate", + "fax", + "globalcache", + "indexmap 2.7.1", + "istring", + "itertools", + "jpeg-decoder", + "libflate", + "log", + "md5", + "once_cell", + "pdf_derive", + "sha2", + "snafu", + "stringprep", + "weezl", +] + +[[package]] +name = "pdf-forge" +version = "0.1.0" +dependencies = [ + "pdf", + "serde", + "serde_json", + "tauri", + "tauri-build", + "tauri-plugin-dialog", + "tauri-plugin-fs", + "tauri-plugin-opener", + "uuid", +] + +[[package]] +name = "pdf_derive" +version = "0.2.0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "phf" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dfb61232e34fcb633f43d12c58f83c1df82962dcdfa565a4e866ffc17dafe12" +dependencies = [ + "phf_macros 0.8.0", + "phf_shared 0.8.0", + "proc-macro-hack", +] + 
+[[package]] +name = "phf" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fabbf1ead8a5bcbc20f5f8b939ee3f5b0f6f281b6ad3468b84656b658b455259" +dependencies = [ + "phf_shared 0.10.0", +] + +[[package]] +name = "phf" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" +dependencies = [ + "phf_macros 0.11.3", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf_codegen" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbffee61585b0411840d3ece935cce9cb6321f01c45477d30066498cd5e1a815" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", +] + +[[package]] +name = "phf_codegen" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb1c3a8bc4dd4e5cfce29b44ffc14bedd2ee294559a294e2a4d4c9e9a6a13cd" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", +] + +[[package]] +name = "phf_generator" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17367f0cc86f2d25802b2c26ee58a7b23faeccf78a396094c13dced0d0182526" +dependencies = [ + "phf_shared 0.8.0", + "rand 0.7.3", +] + +[[package]] +name = "phf_generator" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" +dependencies = [ + "phf_shared 0.10.0", + "rand 0.8.5", +] + +[[package]] +name = "phf_generator" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" +dependencies = [ + "phf_shared 0.11.3", + "rand 0.8.5", +] + +[[package]] +name = "phf_macros" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f6fde18ff429ffc8fe78e2bf7f8b7a5a5a6e2a8b58bc5a9ac69198bbda9189c" +dependencies = [ + "phf_generator 0.8.0", + "phf_shared 0.8.0", + "proc-macro-hack", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "phf_macros" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" +dependencies = [ + "phf_generator 0.11.3", + "phf_shared 0.11.3", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "phf_shared" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" +dependencies = [ + "siphasher 0.3.11", +] + +[[package]] +name = "phf_shared" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher 1.0.1", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "piper" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" +dependencies = [ + "atomic-waker", + "fastrand", + "futures-io", +] + +[[package]] +name = "pkg-config" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" + +[[package]] +name = "plist" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016" +dependencies = [ + "base64 0.22.1", + "indexmap 2.7.1", + "quick-xml 0.32.0", + "serde", + "time", +] + +[[package]] +name = "png" +version = "0.17.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +dependencies = [ + "bitflags 1.3.2", + "crc32fast", + "fdeflate", + "flate2", + "miniz_oxide", +] + +[[package]] +name = "polling" +version = "3.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "precomputed-hash" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" + +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit 0.19.15", +] + +[[package]] +name = "proc-macro-crate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +dependencies = [ + "toml_edit 0.20.7", +] + +[[package]] +name = "proc-macro-crate" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +dependencies = [ + "toml_edit 0.22.22", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro-hack" +version = "0.5.20+deprecated" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" + +[[package]] +name = "proc-macro2" +version = "1.0.93" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quick-xml" +version = "0.32.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +dependencies = [ + "memchr", +] + +[[package]] +name = "quick-xml" +version = "0.36.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" +dependencies = [ + "memchr", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", + "rand_pcg", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.15", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "rand_pcg" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16abd0c1b639e9eb4d7c50c0b8100b0d0f849be2349829c740fe8e6eb4816429" +dependencies = [ + "rand_core 0.5.1", +] + +[[package]] +name = "raw-window-handle" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" + +[[package]] +name = "redox_syscall" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" 
+dependencies = [ + "bitflags 2.8.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.15", + "libredox", + "thiserror 1.0.69", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.12.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43e734407157c3c2034e0258f5e4473ddb361b1e85f95a66690d67264d7cd1da" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "once_cell", + "percent-encoding", + "pin-project-lite", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-util", + "tower", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "windows-registry", +] + +[[package]] +name = "rfd" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a24763657bff09769a8ccf12c8b8a50416fb035fe199263b4c5071e4e3f006f" +dependencies = [ + "ashpd", + "block2", + "core-foundation", + "core-foundation-sys", + "glib-sys", + "gobject-sys", + "gtk-sys", + "js-sys", + "log", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "raw-window-handle", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" + +[[package]] +name = "rustc-demangle" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "rustix" +version = "0.38.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" +dependencies = [ + "bitflags 2.8.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustversion" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c45b9784283f1b2e7fb61b42047c2fd678ef0960d4f6f1eba131594cc369d4" + +[[package]] +name = "ryu" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schemars" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +dependencies = [ + "dyn-clone", + "indexmap 1.9.3", + "schemars_derive", + "serde", + "serde_json", + "url", + "uuid", +] + +[[package]] +name = "schemars_derive" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.96", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "selectors" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df320f1889ac4ba6bc0cdc9c9af7af4bd64bb927bccdf32d81140dc1f9be12fe" +dependencies = [ + "bitflags 1.3.2", + "cssparser", + "derive_more", + "fxhash", + "log", + "matches", + "phf 0.8.0", + "phf_codegen 0.8.0", + "precomputed-hash", + "servo_arc", + "smallvec", + "thin-slice", +] + +[[package]] +name = "semver" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f79dfe2d285b0488816f30e700a7438c5a73d816b5b7d3ac72fbc48b0d185e03" +dependencies = [ + "serde", +] + +[[package]] +name = "serde" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-untagged" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2676ba99bd82f75cae5cbd2c8eda6fa0b8760f18978ea840e980dd5567b5c5b6" +dependencies = [ + "erased-serde", + "serde", + "typeid", +] + +[[package]] +name = "serde_derive" +version = "1.0.217" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serde_json" +version = "1.0.137" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "930cfb6e6abf99298aaad7d29abbef7a9999a9a8806a40088f55f0dcec03146b" +dependencies = [ + "itoa 1.0.14", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + 
+[[package]] +name = "serde_spanned" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa 1.0.14", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6b6f7f2fcb69f747921f79f3926bd1e203fce4fef62c268dd3abfb6d86029aa" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.7.1", + "serde", + "serde_derive", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d00caa5193a3c8362ac2b73be6b9e768aa5a4b2f721d8f4b339600c3cb51f8e" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "serialize-to-javascript" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9823f2d3b6a81d98228151fdeaf848206a7855a7a042bbf9bf870449a66cafb" +dependencies = [ + "serde", + "serde_json", + "serialize-to-javascript-impl", +] + +[[package]] +name = "serialize-to-javascript-impl" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74064874e9f6a15f04c1f3cb627902d0e6b410abbf36668afa873c61889f1763" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "servo_arc" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d98238b800e0d1576d8b6e3de32827c2d74bee68bb97748dcf5071fb53965432" +dependencies = [ + "nodrop", + "stable_deref_trait", +] + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +dependencies = [ + "libc", +] + +[[package]] +name = "simd-adler32" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" + +[[package]] +name = "siphasher" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + 
+[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "snafu" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "223891c85e2a29c3fe8fb900c1fae5e69c2e42415e3177752e8718475efa5019" +dependencies = [ + "snafu-derive", +] + +[[package]] +name = "snafu-derive" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c3c6b7927ffe7ecaa769ee0e3994da3b8cafc8f444578982c83ecb161af917" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "socket2" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "softbuffer" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18051cdd562e792cad055119e0cdb2cfc137e44e3987532e0f9659a77931bb08" +dependencies = [ + "bytemuck", + "cfg_aliases", + "core-graphics", + "foreign-types", + "js-sys", + "log", + "objc2", + "objc2-foundation", + "objc2-quartz-core", + "raw-window-handle", + "redox_syscall", + "wasm-bindgen", + "web-sys", + "windows-sys 0.59.0", +] + +[[package]] +name = "soup3" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "471f924a40f31251afc77450e781cb26d55c0b650842efafc9c6cbd2f7cc4f9f" +dependencies = [ + "futures-channel", + "gio", + "glib", + "libc", + "soup3-sys", +] + +[[package]] +name = "soup3-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ebe8950a680a12f24f15ebe1bf70db7af98ad242d9db43596ad3108aab86c27" +dependencies = [ + "gio-sys", + "glib-sys", + "gobject-sys", + "libc", + "system-deps", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "string_cache" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +dependencies = [ + "new_debug_unreachable", + "once_cell", + "parking_lot", + "phf_shared 0.10.0", + "precomputed-hash", + "serde", +] + +[[package]] +name = "string_cache_codegen" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +dependencies = [ + "phf_generator 0.10.0", + "phf_shared 0.10.0", + "proc-macro2", + "quote", +] + +[[package]] +name = "stringprep" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" +dependencies = [ + "unicode-bidi", + "unicode-normalization", + "unicode-properties", +] + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + 
+[[package]] +name = "swift-rs" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4057c98e2e852d51fdcfca832aac7b571f6b351ad159f9eda5db1655f8d0c4d7" +dependencies = [ + "base64 0.21.7", + "serde", + "serde_json", +] + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5d0adab1ae378d7f53bdebc67a39f1f151407ef230f0ce2883572f5d8985c80" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "system-deps" +version = "6.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e535eb8dded36d55ec13eddacd30dec501792ff23a0b1682c38601b8cf2349" +dependencies = [ + "cfg-expr", + "heck 0.5.0", + "pkg-config", + "toml 0.8.19", + "version-compare", +] + +[[package]] +name = "tao" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3731d04d4ac210cd5f344087733943b9bfb1a32654387dad4d1c70de21aee2c9" +dependencies = [ + "bitflags 2.8.0", + "cocoa", + "core-foundation", + "core-graphics", + "crossbeam-channel", + "dispatch", + "dlopen2", + "dpi", + "gdkwayland-sys", + "gdkx11-sys", + "gtk", + "jni", + "lazy_static", + "libc", + "log", + "ndk", + "ndk-context", + "ndk-sys", + "objc", + "once_cell", + "parking_lot", + "raw-window-handle", + "scopeguard", + "tao-macros", + "unicode-segmentation", + "url", + "windows", + "windows-core 0.58.0", + "windows-version", + "x11-dl", +] + +[[package]] +name = "tao-macros" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4e16beb8b2ac17db28eab8bca40e62dbfbb34c0fcdc6d9826b11b7b5d047dfd" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "tauri" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78f6efc261c7905839b4914889a5b25df07f0ff89c63fb4afd6ff8c96af15e4d" +dependencies = [ + "anyhow", + "bytes", + "dirs", + "dunce", + "embed_plist", + "futures-util", + "getrandom 0.2.15", + "glob", + "gtk", + "heck 0.5.0", + "http", + "jni", + "libc", + "log", + "mime", + "muda", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "percent-encoding", + "plist", + "raw-window-handle", + "reqwest", + "serde", + "serde_json", + "serde_repr", + "serialize-to-javascript", + "swift-rs", + "tauri-build", + "tauri-macros", + "tauri-runtime", + "tauri-runtime-wry", + "tauri-utils", + "thiserror 2.0.11", + "tokio", + "tray-icon", + "url", + "urlpattern", + "webkit2gtk", + 
"webview2-com", + "window-vibrancy", + "windows", +] + +[[package]] +name = "tauri-build" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e950124f6779c6cf98e3260c7a6c8488a74aa6350dd54c6950fdaa349bca2df" +dependencies = [ + "anyhow", + "cargo_toml", + "dirs", + "glob", + "heck 0.5.0", + "json-patch", + "schemars", + "semver", + "serde", + "serde_json", + "tauri-utils", + "tauri-winres", + "toml 0.8.19", + "walkdir", +] + +[[package]] +name = "tauri-codegen" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f77894f9ddb5cb6c04fcfe8c8869ebe0aded4dabf19917118d48be4a95599ab5" +dependencies = [ + "base64 0.22.1", + "brotli", + "ico", + "json-patch", + "plist", + "png", + "proc-macro2", + "quote", + "semver", + "serde", + "serde_json", + "sha2", + "syn 2.0.96", + "tauri-utils", + "thiserror 2.0.11", + "time", + "url", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-macros" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3240a5caed760a532e8f687be6f05b2c7d11a1d791fb53ccc08cfeb3e5308736" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "quote", + "syn 2.0.96", + "tauri-codegen", + "tauri-utils", +] + +[[package]] +name = "tauri-plugin" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5841b9a0200e954ef7457f8d327091424328891e267a97b641dc246cc54d0dec" +dependencies = [ + "anyhow", + "glob", + "plist", + "schemars", + "serde", + "serde_json", + "tauri-utils", + "toml 0.8.19", + "walkdir", +] + +[[package]] +name = "tauri-plugin-dialog" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b59fd750551b1066744ab956a1cd6b1ea3e1b3763b0b9153ac27a044d596426" +dependencies = [ + "log", + "raw-window-handle", + "rfd", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "tauri-plugin-fs", + "thiserror 2.0.11", + "url", +] + +[[package]] +name = "tauri-plugin-fs" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1a1edf18000f02903a7c2e5997fb89aca455ecbc0acc15c6535afbb883be223" +dependencies = [ + "anyhow", + "dunce", + "glob", + "percent-encoding", + "schemars", + "serde", + "serde_json", + "serde_repr", + "tauri", + "tauri-plugin", + "tauri-utils", + "thiserror 2.0.11", + "toml 0.8.19", + "url", + "uuid", +] + +[[package]] +name = "tauri-plugin-opener" +version = "2.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "635ed7c580dc3cdc61c94097d38ef517d749ffc0141c806d904e68e4b0cf1c2a" +dependencies = [ + "dunce", + "glob", + "objc2-app-kit", + "objc2-foundation", + "open", + "schemars", + "serde", + "serde_json", + "tauri", + "tauri-plugin", + "thiserror 2.0.11", + "url", + "windows", + "zbus", +] + +[[package]] +name = "tauri-runtime" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2274ef891ccc0a8d318deffa9d70053f947664d12d58b9c0d1ae5e89237e01f7" +dependencies = [ + "dpi", + "gtk", + "http", + "jni", + "raw-window-handle", + "serde", + "serde_json", + "tauri-utils", + "thiserror 2.0.11", + "url", + "windows", +] + +[[package]] +name = "tauri-runtime-wry" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3707b40711d3b9f6519150869e358ffbde7c57567fb9b5a8b51150606939b2a0" +dependencies = [ + "gtk", + "http", + "jni", + "log", + "objc2", + "objc2-app-kit", + "objc2-foundation", + 
"percent-encoding", + "raw-window-handle", + "softbuffer", + "tao", + "tauri-runtime", + "tauri-utils", + "url", + "webkit2gtk", + "webview2-com", + "windows", + "wry", +] + +[[package]] +name = "tauri-utils" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96fb10e7cc97456b2d5b9c03e335b5de5da982039a303a20d10006885e4523a0" +dependencies = [ + "brotli", + "cargo_metadata", + "ctor", + "dunce", + "glob", + "html5ever", + "http", + "infer", + "json-patch", + "kuchikiki", + "log", + "memchr", + "phf 0.11.3", + "proc-macro2", + "quote", + "regex", + "schemars", + "semver", + "serde", + "serde-untagged", + "serde_json", + "serde_with", + "swift-rs", + "thiserror 2.0.11", + "toml 0.8.19", + "url", + "urlpattern", + "uuid", + "walkdir", +] + +[[package]] +name = "tauri-winres" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5993dc129e544393574288923d1ec447c857f3f644187f4fbf7d9a875fbfc4fb" +dependencies = [ + "embed-resource", + "toml 0.7.8", +] + +[[package]] +name = "tempfile" +version = "3.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" +dependencies = [ + "cfg-if", + "fastrand", + "getrandom 0.2.15", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "tendril" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0" +dependencies = [ + "futf", + "mac", + "utf-8", +] + +[[package]] +name = "thin-slice" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaa81235c7058867fa8c0e7314f33dcce9c215f535d1913822a2b3f5e289f3c" + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d452f284b73e6d76dd36758a0c8684b1d5be31f92b89d07fd5822175732206fc" +dependencies = [ + "thiserror-impl 2.0.11", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26afc1baea8a989337eeb52b6e72a039780ce45c3edfcc9c5b9d112feeb173c2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "time" +version = "0.3.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +dependencies = [ + "deranged", + "itoa 1.0.14", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" + +[[package]] +name = "time-macros" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "022db8904dfa342efe721985167e9fcd16c29b226db4397ed752a761cfce81e8" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d61fa4ffa3de412bfea335c6ecff681de2b609ba3c77ef3e00e521813a9ed9e" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "signal-hook-registry", + "socket2", + "tracing", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "toml" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.19.15", +] + +[[package]] +name = "toml" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +dependencies = [ + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit 0.22.22", +] + +[[package]] +name = "toml_datetime" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.19.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +dependencies = [ + "indexmap 2.7.1", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.7.1", + "toml_datetime", + "winnow 0.5.40", +] + +[[package]] +name = "toml_edit" +version = "0.22.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" +dependencies = [ + "indexmap 2.7.1", + "serde", + "serde_spanned", + "toml_datetime", + "winnow 0.6.24", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" 
+version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "tracing-core" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tray-icon" +version = "0.19.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d48a05076dd272615d03033bf04f480199f7d1b66a8ac64d75c625fc4a70c06b" +dependencies = [ + "core-graphics", + "crossbeam-channel", + "dirs", + "libappindicator", + "muda", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "once_cell", + "png", + "serde", + "thiserror 1.0.69", + "windows-sys 0.59.0", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tuple" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9bb9f6bd73479481158ba8ee3edf17aca93354623d13f02e96a2014fdbc1c37e" +dependencies = [ + "num-traits", + "serde", +] + +[[package]] +name = "typeid" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e13db2e0ccd5e14a544e8a246ba2312cd25223f616442d7f2cb0e3db614236e" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "uds_windows" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" +dependencies = [ + "memoffset", + "tempfile", + "winapi", +] + +[[package]] +name = "unic-char-property" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8c57a407d9b6fa02b4795eb81c5b6652060a15a7903ea981f3d723e6c0be221" +dependencies = [ + "unic-char-range", +] + +[[package]] +name = "unic-char-range" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0398022d5f700414f6b899e10b8348231abf9173fa93144cbc1a43b9793c1fbc" + +[[package]] +name = "unic-common" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80d7ff825a6a654ee85a63e80f92f054f904f21e7d12da4e22f9834a4aaa35bc" + +[[package]] +name = "unic-ucd-ident" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e230a37c0381caa9219d67cf063aa3a375ffed5bf541a452db16e744bdab6987" +dependencies = [ + "unic-char-property", + "unic-char-range", + "unic-ucd-version", +] + +[[package]] +name = "unic-ucd-version" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96bd2f2237fe450fcd0a1d2f5f4e91711124f7857ba2e964247776ebeeb7b0c4" +dependencies = [ + "unic-common", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "unicode-normalization" +version = "0.1.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-properties" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e70f2a8b45122e719eb623c01822704c4e0907e7e426a05927e1a1cfff5b75d0" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", +] + +[[package]] +name = "urlpattern" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70acd30e3aa1450bc2eece896ce2ad0d178e9c079493819301573dae3c37ba6d" +dependencies = [ + "regex", + "serde", + "unic-ucd-ident", + "url", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "uuid" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744018581f9a3454a9e15beb8a33b017183f1e7c0cd170232a2d1453b23a51c4" +dependencies = [ + "getrandom 0.2.15", + "serde", +] + +[[package]] +name = "version-compare" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "852e951cb7832cb45cb1169900d19760cfa39b82bc0ea9c0e5a14ae88411c98b" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "vswhom" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be979b7f07507105799e854203b470ff7c78a1639e330a58f183b5fea574608b" +dependencies = [ + "libc", + "vswhom-sys", +] + +[[package]] +name = "vswhom-sys" 
+version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3b17ae1f6c8a2b28506cd96d412eebf83b4a0ff2cbefeeb952f2f9dfa44ba18" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wayland-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "056535ced7a150d45159d3a8dc30f91a2e2d588ca0b23f70e56033622b8016f6" +dependencies = [ + "cc", + "downcast-rs", + "rustix", + "scoped-tls", + "smallvec", + "wayland-sys", +] + +[[package]] +name = "wayland-client" 
+version = "0.31.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66249d3fc69f76fd74c82cc319300faa554e9d865dab1f7cd66cc20db10b280" +dependencies = [ + "bitflags 2.8.0", + "rustix", + "wayland-backend", + "wayland-scanner", +] + +[[package]] +name = "wayland-protocols" +version = "0.32.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd0ade57c4e6e9a8952741325c30bf82f4246885dca8bf561898b86d0c1f58e" +dependencies = [ + "bitflags 2.8.0", + "wayland-backend", + "wayland-client", + "wayland-scanner", +] + +[[package]] +name = "wayland-scanner" +version = "0.31.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597f2001b2e5fc1121e3d5b9791d3e78f05ba6bfa4641053846248e3a13661c3" +dependencies = [ + "proc-macro2", + "quick-xml 0.36.2", + "quote", +] + +[[package]] +name = "wayland-sys" +version = "0.31.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efa8ac0d8e8ed3e3b5c9fc92c7881406a268e11555abe36493efabe649a29e09" +dependencies = [ + "dlib", + "log", + "pkg-config", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webkit2gtk" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76b1bc1e54c581da1e9f179d0b38512ba358fb1af2d634a1affe42e37172361a" +dependencies = [ + "bitflags 1.3.2", + "cairo-rs", + "gdk", + "gdk-sys", + "gio", + "gio-sys", + "glib", + "glib-sys", + "gobject-sys", + "gtk", + "gtk-sys", + "javascriptcore-rs", + "libc", + "once_cell", + "soup3", + "webkit2gtk-sys", +] + +[[package]] +name = "webkit2gtk-sys" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62daa38afc514d1f8f12b8693d30d5993ff77ced33ce30cd04deebc267a6d57c" +dependencies = [ + "bitflags 1.3.2", + "cairo-sys-rs", + "gdk-sys", + "gio-sys", + "glib-sys", + "gobject-sys", + "gtk-sys", + "javascriptcore-rs-sys", + "libc", + "pkg-config", + "soup3-sys", + "system-deps", +] + +[[package]] +name = "webview2-com" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "823e7ebcfaea51e78f72c87fc3b65a1e602c321f407a0b36dbb327d7bb7cd921" +dependencies = [ + "webview2-com-macros", + "webview2-com-sys", + "windows", + "windows-core 0.58.0", + "windows-implement", + "windows-interface", +] + +[[package]] +name = "webview2-com-macros" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d228f15bba3b9d56dde8bddbee66fa24545bd17b48d5128ccf4a8742b18e431" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "webview2-com-sys" +version = "0.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a82bce72db6e5ee83c68b5de1e2cd6ea195b9fbff91cb37df5884cbe3222df4" +dependencies = [ + "thiserror 1.0.69", + "windows", + "windows-core 0.58.0", +] + +[[package]] +name = "weezl" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "window-vibrancy" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ea403deff7b51fff19e261330f71608ff2cdef5721d72b64180bb95be7c4150" +dependencies = [ + "objc2", + "objc2-app-kit", + "objc2-foundation", + "raw-window-handle", + "windows-sys 0.59.0", + "windows-version", +] + +[[package]] +name = "windows" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd04d41d93c4992d421894c18c8b43496aa748dd4c081bac0dc93eb0489272b6" +dependencies = [ + "windows-core 0.58.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba6d44ec8c2591c134257ce647b7ea6b20335bf6379a27dac5f1641fcf59f99" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-implement" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bbd5b46c938e506ecbce286b6628a02171d56153ba733b6c741fc627ec9579b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "windows-interface" +version = "0.58.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053c4c462dc91d3b1504c6fe5a726dd15e216ba718e84a0e46a88fbe5ded3515" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = 
[ + "windows-result", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" +dependencies = [ + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows-version" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c12476c23a74725c539b24eae8bfc0dac4029c39cdb561d9f23616accd4ae26d" +dependencies = [ + "windows-targets 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "winnow" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d71a593cc5c42ad7876e2c1fda56f314f3754c084128833e64f1345ff8a03a" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "write16" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + +[[package]] +name = "wry" +version = 
"0.48.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2e33c08b174442ff80d5c791020696f9f8b4e4a87b8cfc7494aad6167ec44e1" +dependencies = [ + "base64 0.22.1", + "block2", + "cookie", + "crossbeam-channel", + "dpi", + "dunce", + "gdkx11", + "gtk", + "html5ever", + "http", + "javascriptcore-rs", + "jni", + "kuchikiki", + "libc", + "ndk", + "objc2", + "objc2-app-kit", + "objc2-foundation", + "objc2-ui-kit", + "objc2-web-kit", + "once_cell", + "percent-encoding", + "raw-window-handle", + "sha2", + "soup3", + "tao-macros", + "thiserror 2.0.11", + "url", + "webkit2gtk", + "webkit2gtk-sys", + "webview2-com", + "windows", + "windows-core 0.58.0", + "windows-version", + "x11-dl", +] + +[[package]] +name = "x11" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "502da5464ccd04011667b11c435cb992822c2c0dbde1770c988480d312a0db2e" +dependencies = [ + "libc", + "pkg-config", +] + +[[package]] +name = "x11-dl" +version = "2.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38735924fedd5314a6e548792904ed8c6de6636285cb9fec04d5b1db85c1516f" +dependencies = [ + "libc", + "once_cell", + "pkg-config", +] + +[[package]] +name = "xdg-home" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec1cdab258fb55c0da61328dc52c8764709b249011b2cad0454c72f0bf10a1f6" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "yoke" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zbus" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "192a0d989036cd60a1e91a54c9851fb9ad5bd96125d41803eed79d2e2ef74bd7" +dependencies = [ + "async-broadcast", + "async-executor", + "async-fs", + "async-io", + "async-lock", + "async-process", + "async-recursion", + "async-task", + "async-trait", + "blocking", + "enumflags2", + "event-listener", + "futures-core", + "futures-util", + "hex", + "nix", + "ordered-stream", + "serde", + "serde_repr", + "static_assertions", + "tokio", + "tracing", + "uds_windows", + "windows-sys 0.59.0", + "winnow 0.6.24", + "xdg-home", + "zbus_macros", + "zbus_names", + "zvariant", +] + +[[package]] +name = "zbus_macros" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3685b5c81fce630efc3e143a4ded235b107f1b1cdf186c3f115529e5e5ae4265" +dependencies = [ + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "syn 2.0.96", + "zbus_names", + "zvariant", + "zvariant_utils", +] + +[[package]] +name = "zbus_names" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "519629a3f80976d89c575895b05677cbc45eaf9f70d62a364d819ba646409cc8" +dependencies = [ + "serde", + "static_assertions", + "winnow 0.6.24", + "zvariant", +] + +[[package]] +name = "zerocopy" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +dependencies = [ + "byteorder", + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zerofrom" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", + "synstructure", +] + +[[package]] +name = "zerovec" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.96", +] + +[[package]] +name = "zvariant" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55e6b9b5f1361de2d5e7d9fd1ee5f6f7fcb6060618a1f82f3472f58f2b8d4be9" +dependencies = [ + "endi", + "enumflags2", + "serde", + "static_assertions", + "url", + "winnow 0.6.24", + "zvariant_derive", + "zvariant_utils", +] + +[[package]] +name = "zvariant_derive" +version = "5.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "573a8dd76961957108b10f7a45bac6ab1ea3e9b7fe01aff88325dc57bb8f5c8b" +dependencies = [ + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "syn 2.0.96", + "zvariant_utils", +] + +[[package]] +name = "zvariant_utils" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd46446ea2a1f353bfda53e35f17633afa79f4fe290a611c94645c69fe96a50" +dependencies = [ + "proc-macro2", + "quote", + "serde", + "static_assertions", + "syn 2.0.96", + "winnow 0.6.24", +] diff --git a/src-tauri/Cargo.toml b/src-tauri/Cargo.toml new file mode 100644 index 0000000..2aaa1d2 --- /dev/null +++ b/src-tauri/Cargo.toml @@ -0,0 +1,29 @@ +[package] +name = "pdf-forge" +version = "0.1.0" +description = "A Tauri App" +authors = ["you"] +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +# The `_lib` suffix may seem redundant but it is necessary +# to make the lib name unique and wouldn't conflict with the bin name. 
+# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519 +name = "pdf_forge_lib" +crate-type = ["staticlib", "cdylib", "rlib"] + +[build-dependencies] +tauri-build = { version = "2", features = [] } + +[dependencies] +tauri = { version = "2", features = [] } +tauri-plugin-opener = "2" +serde = { version = "1", features = ["derive"] } +serde_json = "1" +pdf = { path = "../src-pdfrs/pdf", features = ["cache"] } +tauri-plugin-fs = "2" +tauri-plugin-dialog = "2" +uuid = { version = "1.12.0", features = ["v4"] } + diff --git a/src-tauri/build.rs b/src-tauri/build.rs new file mode 100644 index 0000000..d860e1e --- /dev/null +++ b/src-tauri/build.rs @@ -0,0 +1,3 @@ +fn main() { + tauri_build::build() +} diff --git a/src-tauri/capabilities/default.json b/src-tauri/capabilities/default.json new file mode 100644 index 0000000..201f411 --- /dev/null +++ b/src-tauri/capabilities/default.json @@ -0,0 +1,20 @@ +{ + "$schema": "../gen/schemas/desktop-schema.json", + "identifier": "default", + "description": "Capability for the main window", + "windows": [ + "main" + ], + "permissions": [ + "core:default", + "opener:default", + "fs:default", + "dialog:default", + "core:window:default", + "core:window:allow-start-dragging", + "core:window:allow-close", + "core:window:allow-minimize", + "core:window:allow-toggle-maximize", + "core:window:allow-internal-toggle-maximize" + ] +} \ No newline at end of file diff --git a/src-tauri/src/lib.rs b/src-tauri/src/lib.rs new file mode 100644 index 0000000..4fc8a04 --- /dev/null +++ b/src-tauri/src/lib.rs @@ -0,0 +1,684 @@ +extern crate pdf; + +use crate::pdf::object::Resolve; + +use pdf::content::Op; +use pdf::file::{File, FileOptions, NoLog, ObjectCache, StreamCache}; +use pdf::object::{InfoDict, Object, ObjectWrite, PlainRef}; +use pdf::primitive::Primitive; +use pdf::xref::XRef; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, VecDeque}; +use std::fmt::format; +use std::ops::DerefMut; +use std::path::Path; +use std::sync::{Mutex, MutexGuard}; +use tauri::{Manager, State}; +use uuid::Uuid; + +type CosFile = File, ObjectCache, StreamCache, NoLog>; + +macro_rules! 
t { + ($result:expr) => {{ + match $result { + Ok(f) => f, + Err(e) => return Err(e.to_string()), + } + }}; +} +#[derive(Serialize, Debug, Clone)] +pub struct XRefTableModel { + pub size: usize, + pub entries: Vec, +} +#[derive(Serialize, Debug, Clone)] +pub struct XRefEntryModel { + pub obj_num: u64, + pub gen_num: u64, + pub obj_type: String, + pub offset: u64, +} +#[derive(Serialize, Debug, Clone)] +pub struct PdfFile { + pub id: String, + pub name: String, + pub path: String, + pub page_count: u32, + pub xref_entries: usize, + pub pages: Vec, +} + + +#[derive(Serialize, Debug, Clone)] +pub struct PrimitiveModel { + pub key: String, + pub ptype: String, + pub sub_type: String, + pub value: String, + pub children: Vec, + pub detail_path: Vec, +} + +#[derive(Serialize, Debug, Clone)] +pub struct DetailPathStep { + pub key: String, + pub last_jump: String, +} +impl DetailPathStep { + fn new(key: String, last_jump: String) -> DetailPathStep { + DetailPathStep { key, last_jump } + } +} + + +#[derive(Serialize, Debug, Clone)] +pub struct PageModel { + key: String, + id: u64, +} +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct TreeViewNode { + key: String, + children: Vec, +} + +impl TreeViewNode { + fn step(&self) -> Step { + Step::parse_step(&self.key) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone)] +pub struct ContentsModel { + parts: Vec> +} + +#[tauri::command] +fn get_all_files(session: State>) -> Vec { + let files = &session.lock().unwrap().files; + files + .values() + .map(|sf| sf.pdf_file.clone()) + .collect::>() +} + +#[tauri::command] +fn get_all_file_ids(session: State>) -> Vec { + let files = &session.lock().unwrap().files; + files + .values() + .map(|sf| sf.pdf_file.id.clone()) + .collect::>() +} + +#[tauri::command] +fn close_file(id: &str, session: State>) { + session.lock().unwrap().deref_mut().handle_close(&id); +} + +#[tauri::command] +fn get_file_by_id(id: &str, session: State>) -> Result { + let session_guard = session + .lock() + .map_err(|_| "Failed to lock the session mutex.".to_string())?; + let file = &get_file_from_state(id, &session_guard)?; + Ok(file.pdf_file.clone()) +} + +#[tauri::command] +fn upload(path: &str, session: State>) -> Result { + let file = t!(FileOptions::cached().open(path)); + + let pdf_file = to_pdf_file(path, &file)?; + + session + .lock() + .unwrap() + .deref_mut() + .handle_upload(&pdf_file, file); + + Ok(pdf_file.id.to_string()) +} + +fn to_pdf_file(path: &str, file: &CosFile) -> Result { + + fn parse_title_from_path(path: &str) -> Option { + Path::new(path).file_name() + .and_then(|f| f.to_str().map(|s| s.to_string())) + } + + let file_name = if let Some(ref info) = file.trailer.info_dict { + info.title.as_ref().map(|p| p.to_string_lossy()) + .unwrap_or( parse_title_from_path(path) + .unwrap_or_else(|| "Not found".to_string())) + } else { + "Not found".to_string() + }; + + + let pages = file.pages().enumerate().map(|(i, page_ref)| PageModel { key: format!("Page {}", i + 1), id: page_ref.unwrap().get_ref().get_inner().id }).collect(); + + let pdf_file = PdfFile { + id: Uuid::new_v4().to_string(), + name: file_name.to_string().into(), + path: path.to_string().into(), + page_count: file.num_pages(), + xref_entries: file.get_xref().len(), + pages: pages, + }; + Ok(pdf_file) +} + +#[tauri::command] +fn get_contents(id: &str, path: &str, session: State>) -> Result { + let session_guard = session + .lock() + .map_err(|_| "Failed to lock the session mutex.".to_string())?; + let file = get_file_from_state(path, &session_guard)?; 
+ + let (_, page_prim, _) = get_prim_by_path_with_file(id, &file.cos_file)?; + let resolver = file.cos_file.resolver(); + + let page = t!(pdf::object::Page::from_primitive(page_prim, &resolver)); + if let Some(contents) = page.contents { + let mut parts = vec![]; + for part in contents.parts { + let data = &t!(part.data(&resolver)); + let ops = t!(pdf::content::parse_ops(&data, &resolver)); + let part = t!(pdf::content::display_ops(&ops)); + parts.push(part); + }; + return Ok(ContentsModel {parts}); + } + Err(String::from("Error occurred")) +} + +#[tauri::command] +fn get_prim_by_path( + id: &str, + path: &str, + session: State>, +) -> Result { + let session_guard = session + .lock() + .map_err(|_| "Failed to lock the session mutex.".to_string())?; + let file = get_file_from_state(id, &session_guard)?; + + get_prim_model_by_path_with_file(path, &file.cos_file) +} +fn get_prim_model_by_path_with_file(path: &str, file: &CosFile) -> Result { + let (key, prim, detail_path) = get_prim_by_path_with_file(path, file)?; + + Ok(PrimitiveModel::from_primitive_with_children( + key, + &prim, + detail_path + )) +} + +fn get_prim_by_path_with_file(path: &str, file: &CosFile) -> Result<(String, Primitive, Vec), String> { + let mut steps = Step::parse(path); + if steps.len() == 0 { + return Err(String::from(format!("{} is not a valid path!", path))); + } + let mut step = steps.pop_front().unwrap(); + let mut parent = match step { + Step::Number(obj_num) => resolve_xref(obj_num, file)?, + Step::Trailer => retrieve_trailer(file), + _ => return Err(String::from(format!("{} is not a valid path!", path))), + }; + + let mut detail_path = vec![DetailPathStep::new(step.get_key(), step.get_key())]; + let mut last_jump = step.get_key(); + + let mut current_prim = &parent; + while !steps.is_empty() { + step = steps.pop_front().unwrap(); + + current_prim = resolve_step(¤t_prim, &step)?; + if let Primitive::Reference(xref) = current_prim { + last_jump = xref.id.to_string(); + parent = resolve_xref(xref.id, file)?; + current_prim = &parent; + } + detail_path.push(DetailPathStep::new(step.get_key(), last_jump.clone())); + } + Ok((step.get_key(), current_prim.clone(), detail_path)) +} + +#[tauri::command] +fn get_prim_tree_by_path( + id: &str, + path: TreeViewNode, + session: State>, +) -> Result { + let session_guard = session + .lock() + .map_err(|_| "Failed to lock the session mutex.".to_string())?; + let file = get_file_from_state(id, &session_guard)?; + + get_prim_tree_by_path_with_file(path, &file.cos_file) +} + +fn get_prim_tree_by_path_with_file( + node: TreeViewNode, + file: &CosFile, +) -> Result { + let step = node.step(); + let parent = match step { + Step::Number(obj_num) => resolve_xref(obj_num, file)?, + Step::Trailer => retrieve_trailer(file), + _ => return Err(String::from(format!("{:?} is not a valid path!", node))), + }; + let path = vec![DetailPathStep::new(step.get_key(), step.get_key())]; + + let mut parent_model = PrimitiveModel::from_primitive_with_children(step.get_key(), &parent, path); + for child in node.children.iter() { + expand(child, &mut parent_model, &parent, file)?; + } + + Ok(parent_model) +} + +fn expand( + node: &TreeViewNode, + parent_model: &mut PrimitiveModel, + parent: &Primitive, + file: &CosFile, +) -> Result<(), String> { + let step = node.step(); + let prim = resolve_step(parent, &step)?; + if let Primitive::Reference(x_ref) = prim { + let jump = resolve_xref(x_ref.id, file)?; + // parent_model.ptype = format!("{}-Reference", jump.get_debug_name()); + let mut to_expand = 
parent_model.get_child(step.get_key()).unwrap(); + to_expand.add_children(&jump, append_path_with_jump(step.get_key(), x_ref.id.to_string(), &to_expand.detail_path)); + expand_children(node, file, &jump, &mut to_expand)?; + } else { + let mut to_expand = parent_model.get_child(step.get_key()).unwrap(); + to_expand.add_children(prim, append_path(step.get_key(), &to_expand.detail_path)); + expand_children(node, file, prim, &mut to_expand)?; + } + Ok(()) +} + +fn expand_children( + node: &TreeViewNode, + file: &CosFile, + prim: &Primitive, + mut expanded: &mut PrimitiveModel, +) -> Result<(), String> { + for child in node.children.iter() { + expand(child, &mut expanded, prim, file)?; + } + + Ok(()) +} + +fn resolve_step<'a>(current_prim: &'a Primitive, step: &Step) -> Result<&'a Primitive, String> { + Ok(match step { + Step::Number(index) => match current_prim { + Primitive::Array(prim_array) => { + let i = index.clone() as usize; + if prim_array.len() <= i { + return Err(String::from(format!( + "{} index out of bounds!", + step.get_key() + ))); + } + &prim_array[i] + } + p => { + return Err(String::from(format!( + "{} is not indexed with numbers!", + p.get_debug_name() + ))) + } + }, + Step::String(key) => match current_prim { + Primitive::Dictionary(dict) => match dict.get(key) { + Some(prim) => prim, + None => { + return Err(String::from(format!( + "Key {} does not exist in Dictionary!", + key + ))) + } + }, + Primitive::Stream(stream) => match stream.info.get(key) { + Some(prim) => prim, + None => { + return Err(String::from(format!( + "Key {} does not exist in Info Dictionary!", + key + ))) + } + }, + p => { + return Err(String::from(format!( + "{} has no String paths!", + p.get_debug_name() + ))) + } + }, + Step::Data => return Err("Not implemented!".to_string()), + _ => return Err(format!("Invalid Step: {}", step.get_key())), + }) +} + +fn retrieve_trailer(file: &CosFile) -> Primitive { + let mut updater = FileOptions::uncached().storage(); + file.trailer.to_primitive(&mut updater).unwrap() +} + +#[derive(Debug)] +pub enum Step { + String(String), + Number(u64), + Trailer, + Data, +} + +impl Step { + fn parse_step(path: &str) -> Step { + match &path.parse::().ok() { + Some(i) => Step::Number(*i), + None => match &path[..] 
{ + "Data" => Step::Data, + "/" => Step::Trailer, + _ => Step::String(path.to_string().clone()), + }, + } + } + + fn parse(path: &str) -> VecDeque { + let mut steps = VecDeque::new(); + + if path.starts_with("/") { + steps.push_back(Step::Trailer); + } + let split_path = path.split("/").collect::>(); + for path_component in split_path { + if path_component.len() == 0 { + continue; + } + let step = match &path_component.parse::().ok() { + Some(i) => Step::Number(*i), + None => match path_component { + "Data" => Step::Data, + _ => Step::String(path_component.to_string().clone()), + }, + }; + steps.push_back(step); + } + steps + } + fn get_key(&self) -> String { + match self { + Step::String(s) => s.clone(), + Step::Number(i) => i.to_string(), + Step::Trailer => "/".to_string(), + Step::Data => "Data".into(), + } + } +} + +fn resolve_xref(id: u64, file: &CosFile) -> Result { + let plain_ref = PlainRef { id, gen: 0 }; + file.resolver() + .resolve(plain_ref) + .map_err(|e| e.to_string()) +} + +fn get_file_from_state<'a>( + id: &str, + session_guard: &'a MutexGuard, +) -> Result<&'a SessionFile, String> { + session_guard + .files + .get(id) + .ok_or_else(|| format!("File with id {} does not exist!", id)) +} + +fn append_path_with_jump(key: String, last_jump: String, path: &Vec) -> Vec { + let mut new_path = path.clone(); + new_path.push(DetailPathStep::new(key, last_jump)); + new_path +} + +fn append_path(key: String, path: &Vec) -> Vec { + let mut new_path = path.clone(); + let last_jump = new_path.last().unwrap().last_jump.clone(); + new_path.push(DetailPathStep::new(key, last_jump)); + new_path +} + + +impl PrimitiveModel { + fn from_primitive(key: String, primitive: &Primitive, path: Vec) -> PrimitiveModel { + let value: String = match primitive { + Primitive::Null => "Null".to_string(), + Primitive::Integer(i) => i.to_string(), + Primitive::Number(f) => f.to_string(), + Primitive::Boolean(b) => b.to_string(), + Primitive::String(s) => s.to_string().unwrap_or(String::new()), + Primitive::Stream(_) => "-".to_string(), + Primitive::Dictionary(_) => "-".to_string(), + Primitive::Array(arr) =>PrimitiveModel::format_arr_content(arr), + Primitive::Reference(pref) => { + format!("Obj Number: {} Gen Number: {}", pref.id, pref.gen) + } + + Primitive::Name(name) => name.clone().as_str().to_string(), + }; + let sub_type: String = match primitive { + Primitive::Dictionary(d) => d + .get("Type") + .and_then(|value| match value { + Primitive::Name(name) => Some(name.clone().as_str().to_string()), + _ => None + }) + .unwrap_or(String::from("-")), + _ => String::from("-") + }; + PrimitiveModel { + key: key, + ptype: primitive.get_debug_name().into(), + sub_type: sub_type, + value: value, + children: Vec::new(), + detail_path: path, + } + } + + fn format_arr_content(arr: &Vec) -> String { + if arr.len() == 0 { + return "[]".to_string(); + } + let mut result = String::from("["); + let contents = if arr.len() > 4 { &arr[0..4] } else { &arr[..] 
}; + for i in 0..contents.len() { + let prim = contents.get(i).unwrap(); + result.push_str(&match prim { + Primitive::Integer(i) => format!("{}", i), + Primitive::Number(n) => format!("{}", n), + Primitive::Boolean(b) => format!("{}", b), + Primitive::String(s) => s.to_string().unwrap_or(String::from("-")), + Primitive::Name(n) => n.as_str().to_string(), + _ => prim.get_debug_name().to_string(), + }); + if i != contents.len() - 1 { + result.push_str(", "); + } + } + + if arr.len() > 4 { + result.push_str(",..."); + } + result.push_str("]"); + result + } + + fn from_primitive_with_children(key: String, primitive: &Primitive, path: Vec) -> PrimitiveModel { + let mut model = PrimitiveModel::from_primitive(key, primitive, path.clone()); + model.add_children(primitive, path); + model + } + + fn add_children(&mut self, primitive: &Primitive, path: Vec) { + match primitive { + Primitive::Dictionary(dict) => dict.iter().for_each(|(name, value)| { + self.add_child(name.clone().as_str().to_string(), value, append_path(name.clone().as_str().to_string(), &path)); + }), + Primitive::Array(arr) => arr.iter().enumerate().for_each(|(i, obj)| { + self.add_child(i.to_string(), obj, append_path(i.to_string(), &path)); + }), + Primitive::Stream(stream) => { + self.children.push(PrimitiveModel { + key: "Data".to_string(), + ptype: "Stream Data".to_string(), + sub_type: "-".to_string(), + value: "".to_string(), + children: vec![], + detail_path: append_path("Data".to_string(), &path), + }); + stream.info.iter().for_each(|(name, value)| { + self.add_child(name.clone().as_str().to_string(), value, append_path(name.clone().as_str().to_string(), &path)); + }) + } + _ => (), + }; + } + + fn add_child(&mut self, key: String, child: &Primitive, path: Vec) -> &PrimitiveModel { + let child_model = Self::from_primitive(key, child, path); + self.children.push(child_model); + &self.children[self.children.len() - 1] + } + + fn get_child(&mut self, key: String) -> Option<&mut PrimitiveModel> { + self.children.iter_mut().find(|child| child.key == key) + } +} +#[tauri::command] +fn get_xref_table(id: &str, session: State>) -> Result { + let session_guard = session + .lock() + .map_err(|_| "Failed to lock the session mutex.".to_string())?; + let file = get_file_from_state(id, &session_guard)?; + get_xref_table_model_with_file(&file.cos_file) +} +fn get_xref_table_model_with_file(file: &CosFile) -> Result { + let resolver = file.resolver(); + let x_ref_table = file.get_xref(); + let mut models: Vec = Vec::new(); + + for (i, x_ref) in x_ref_table.iter_real().enumerate() { + models.push(match x_ref { + XRef::Raw { pos, gen_nr } => { + let prim: Primitive = resolver + .resolve(PlainRef { + id: i as u64, + gen: *gen_nr, + }) + .unwrap(); + XRefEntryModel { + obj_num: i as u64, + gen_num: *gen_nr, + obj_type: prim.get_debug_name().to_string().into(), + offset: *pos as u64, + } + } + XRef::Stream { stream_id, index } => XRefEntryModel { + obj_num: i as u64, + gen_num: *stream_id as u64, + obj_type: "Stream".into(), + offset: *index as u64, + }, + XRef::Free { + next_obj_nr, + gen_nr, + } => XRefEntryModel { + obj_num: i as u64, + gen_num: *gen_nr as u64, + obj_type: "Free".into(), + offset: *next_obj_nr as u64, + }, + XRef::Promised => XRefEntryModel { + obj_num: i as u64, + gen_num: 0, + obj_type: "Promised".into(), + offset: 0, + }, + XRef::Invalid => XRefEntryModel { + obj_num: i as u64, + gen_num: 0, + obj_type: "Invalid".into(), + offset: 0, + }, + }); + } + Ok(XRefTableModel { + size: x_ref_table.len(), + entries: models, + }) 
+} + +struct Session { + files: HashMap, +} + +struct SessionFile { + pdf_file: PdfFile, + cos_file: CosFile, +} + +unsafe impl Send for SessionFile {} +unsafe impl Sync for SessionFile {} + +impl Session { + fn load() -> Session { + Session { + files: HashMap::new(), + } + } + + fn handle_upload(&mut self, pdf_file: &PdfFile, cos_file: CosFile) { + self.files.insert( + pdf_file.id.clone(), + SessionFile { + pdf_file: pdf_file.clone(), + cos_file: cos_file, + }, + ); + } + + fn handle_close(&mut self, id: &str) { + self.files.remove(id); + } +} +#[cfg_attr(mobile, tauri::mobile_entry_point)] +pub fn run() { + tauri::Builder::default() + .plugin(tauri_plugin_dialog::init()) + .plugin(tauri_plugin_fs::init()) + .plugin(tauri_plugin_opener::init()) + .setup(|app| { + app.manage(Mutex::new(Session::load())); + Ok(()) + }) + .invoke_handler(tauri::generate_handler![ + upload, + get_all_files, + get_all_file_ids, + get_file_by_id, + close_file, + get_prim_by_path, + get_prim_tree_by_path, + get_xref_table, + get_contents + ]) + .run(tauri::generate_context!()) + .expect("error while running tauri application"); +} + diff --git a/src-tauri/src/main.rs b/src-tauri/src/main.rs new file mode 100644 index 0000000..1c58688 --- /dev/null +++ b/src-tauri/src/main.rs @@ -0,0 +1,8 @@ +// Prevents additional console window on Windows in release, DO NOT REMOVE!! +#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] + +use pdf_forge_lib::run; + +fn main() { + run(); +} diff --git a/src-tauri/src/tests.rs b/src-tauri/src/tests.rs new file mode 100644 index 0000000..b2f23c2 --- /dev/null +++ b/src-tauri/src/tests.rs @@ -0,0 +1,162 @@ +extern crate pdf; + +#[cfg(test)] +mod tests { + + use crate::{ + get_prim_by_path_with_file, get_prim_model_by_path_with_file, get_prim_tree_by_path_with_file, get_xref_table_model_with_file, to_pdf_file, DetailPathStep, PrimitiveModel, TreeViewNode + }; + + use pdf::content::{display_ops, serialize_ops, Op}; + use pdf::file::FileOptions; + use pdf::object::{Object, ObjectWrite, Page, PlainRef, Resolve}; + use pdf::primitive::Primitive; + use std::time::Instant; + macro_rules! 
timed { + ($func_call:expr, $label:expr) => {{ + let start = std::time::Instant::now(); + let result = $func_call; + let duration = std::time::Instant::now().duration_since(start); + println!("{} took {:?}", $label, duration); + result + }}; + } + // Import items to be tested from the parent module + const FILE_PATH: &str = + "/home/kschuettler/Dokumente/Scientific Papers/PDF Specification/ISO_32000-2_2020(en).pdf"; + + #[test] + fn test_read_x_ref() { + let start = Instant::now(); + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + let resolver = file.resolver(); + let refs = get_xref_table_model_with_file(&file).unwrap().entries; + let time = Instant::now().duration_since(start); + println!("retrieving {} primitives took {:?}", refs.len(), time); + let start = Instant::now(); + let ex = refs.get(19368).ok_or(Err::("wtf")).unwrap(); + let prim: Primitive = resolver + .resolve(PlainRef { + id: ex.obj_num, + gen: ex.gen_num, + }) + .unwrap(); + let time = Instant::now().duration_since(start); + println!("{:?}", prim); + println!("retrieving one primitive took {:?}", time); + let start = Instant::now(); + let refs = get_xref_table_model_with_file(&file).unwrap().entries; + let time = Instant::now().duration_since(start); + println!("retrieving {} primitives again took {:?}", refs.len(), time); + } + #[test] + fn test_read_tree() { + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + let mut path = Vec::new(); + path.push(TreeViewNode { + key: "Index".to_string(), + children: vec![TreeViewNode { + key: "1".to_string(), + children: vec![], + }], + }); + path.push(TreeViewNode { + key: "Info".to_string(), + children: vec![], + }); + path.push(TreeViewNode { + key: "Root".to_string(), + children: vec![TreeViewNode { + key: "Pages".to_string(), + children: vec![], + }], + }); + let root = TreeViewNode { + key: "/".to_string(), + children: path, + }; + + let message = format!("Retrieval of {:?}", root); + let prim = timed!(get_prim_tree_by_path_with_file(root, &file), message); + print_node(prim.unwrap(), 0); + } + #[test] + fn test_read_by_path() { + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + let path = "/Root/Pages"; + + let message = format!("Retrieval of {:?}", path); + let prim = timed!(get_prim_model_by_path_with_file(path, &file), message); + print_node(prim.unwrap(), 0); + } + + fn print_node(node: PrimitiveModel, depth: usize) { + let spaces = " ".repeat(depth); + println!("{:?}", node.detail_path); + println!("{}{} | {} | {}", spaces, node.key, node.ptype, node.value); + for child in node.children { + print_node(child, depth + 1); + } + } + #[test] + fn test_read_trailer() { + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + let mut file2 = timed!( + FileOptions::uncached().storage(), + "Loading storage" + ); + + + let trail = timed!(file.trailer.to_primitive(&mut file2).unwrap(), "writing trailer"); + let trail_model = PrimitiveModel::from_primitive_with_children("Trailer".to_string(), &trail, vec![DetailPathStep::new("/".to_string(), "/".to_string())]); + print_node(trail_model, 5); + println!("{:?}", file.trailer.info_dict); + } + + #[test] + fn test_read_pdf_file() { + use crate::to_pdf_file; + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + + let _pdf_file = timed!(to_pdf_file(FILE_PATH, &file), "pages 1"); + let pdf_file = timed!(to_pdf_file(FILE_PATH, &file), 
"pages 2"); + println!("{:?}", pdf_file); + } + #[test] + fn test_read_contents() { + + let file = timed!( + FileOptions::cached().open(FILE_PATH).unwrap(), + "Loading file" + ); + + let (_, page2_prim, _) = get_prim_by_path_with_file("1", &file).unwrap(); + let resolver = file.resolver(); + let page2 = Page::from_primitive(page2_prim, &resolver).unwrap(); + let mut ops: Vec = timed!(page2.contents.unwrap().operations(&resolver).unwrap(), "parse ops"); + let serialized = timed!(serialize_ops(&mut ops).unwrap(), "serializing"); + let display = timed!(display_ops(&mut ops).unwrap(), "displaying"); + println!("Serialized -----------------------------------------------------------------"); + println!("{}", String::from_utf8(serialized).unwrap()); + println!("Displayed -----------------------------------------------------------------"); + for (line, s) in display.iter().enumerate() { + println!("{}: {}", line, s); + } + + } +} \ No newline at end of file diff --git a/src-tauri/tauri.conf.json b/src-tauri/tauri.conf.json new file mode 100644 index 0000000..df14239 --- /dev/null +++ b/src-tauri/tauri.conf.json @@ -0,0 +1,36 @@ +{ + "$schema": "https://schema.tauri.app/config/2", + "productName": "pdf-forge", + "version": "0.1.0", + "identifier": "pdf-forge", + "build": { + "beforeDevCommand": "yarn dev", + "devUrl": "http://localhost:1420", + "beforeBuildCommand": "yarn build", + "frontendDist": "../build" + }, + "app": { + "windows": [ + { + "decorations": false, + "title": "PDF Forge", + "width": 1920, + "height": 1080 + } + ], + "security": { + "csp": null + } + }, + "bundle": { + "active": true, + "targets": "all", + "icon": [ + "icons/32x32.png", + "icons/128x128.png", + "icons/128x128@2x.png", + "icons/icon.icns", + "icons/icon.ico" + ] + } +} diff --git a/src/app.css b/src/app.css new file mode 100644 index 0000000..09b5d55 --- /dev/null +++ b/src/app.css @@ -0,0 +1,32 @@ +@import "tailwindcss/base"; +@import "tailwindcss/components"; +@import "tailwindcss/utilities"; + +:root { + /* Colors */ + --background-color: rgb(43, 45, 48); + --boundary-color: rgba(0, 0, 0, 0.29); + --secondary-color: rgba(103, 101, 101, 0.6); + --accent-color: rgb(44, 97, 97); + --font-color: #c0cacd; + --secondary-font-color: #6c6c6c; +} + +body { + margin: 0; + font-family: 'Arial', sans-serif; + background-color: var(--background-color); + color: var(--font-color); + border-color: var(--secondary-color) +} +::before, ::after { + border-color: var(--secondary-color); +} + +.full-container { + height: 100%; + width: 100%; +} + + + diff --git a/src/app.html b/src/app.html new file mode 100644 index 0000000..ba9605a --- /dev/null +++ b/src/app.html @@ -0,0 +1,13 @@ + + + + + + + PDF Forge + %sveltekit.head% + + +

%sveltekit.body%
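Note: the Svelte layer introduced below talks to the Rust commands registered in src-tauri/src/lib.rs exclusively through Tauri's invoke bridge. A minimal sketch of that contract follows, assuming a small helper module somewhere under src/; the function name and import paths are illustrative and not part of this commit, while the command names and argument keys mirror the Rust signatures above.

```ts
// Hypothetical helper (not part of this commit) illustrating the IPC contract,
// assuming it lives under src/ next to the models directory.
import { invoke } from "@tauri-apps/api/core";
import { open } from "@tauri-apps/plugin-dialog";
import type PdfFile from "./models/PdfFile";
import type XRefTable from "./models/XRefTable";

export async function openAndInspect(): Promise<void> {
  // The "dialog:default" permission in capabilities/default.json allows this picker.
  const path = await open({ filters: [{ name: "PDF", extensions: ["pdf"] }] });
  if (typeof path !== "string") return; // cancelled or multiple selection

  // `upload` opens the file with the vendored pdf crate and returns a session id.
  const id = await invoke<string>("upload", { path });

  // All other commands are keyed by that session id.
  const file = await invoke<PdfFile>("get_file_by_id", { id });
  const xref = await invoke<XRefTable>("get_xref_table", { id });
  const root = await invoke("get_prim_by_path", { id, path: "/Root" });

  console.log(file.name, xref.size, root);
}
```

Since every command returns a Result whose error side is a String (via the `t!` macro in lib.rs), a failed call rejects the promise with that string; the components below handle this with `.catch(err => console.error(err))`.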
+ + diff --git a/src/components/App.svelte b/src/components/App.svelte new file mode 100644 index 0000000..932095f --- /dev/null +++ b/src/components/App.svelte @@ -0,0 +1,287 @@ + + +
+
+ +
+
+ + + + + + + {#if (fState)} + + {:else} + + {/if} + + + + + +
+
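The inspection views that follow (ContentsView, DocumentView, TreeView, XRefTable) all address COS objects the same way, so the two payload shapes are worth spelling out once. This is an illustrative sketch only: the path grammar follows Step::parse in lib.rs, and the tree payload mirrors TreeViewNode from src/models; the helper name and the session-id plumbing are assumptions.

```ts
import { invoke } from "@tauri-apps/api/core";

// Illustrative only: `sessionId` would come from the `upload` command.
async function inspectPages(sessionId: string) {
  // Paths follow Step::parse in lib.rs: a leading "/" selects the trailer,
  // name components index into dictionaries, and integer components index
  // arrays (or, as the first component, an object number from the xref table).
  const pages = await invoke("get_prim_by_path", {
    id: sessionId,
    path: "/Root/Pages",
  });

  // get_prim_tree_by_path instead takes a nested TreeViewNode { key, children }
  // describing which branches of the object graph to expand in a single call.
  const tree = await invoke("get_prim_tree_by_path", {
    id: sessionId,
    path: {
      key: "/",
      children: [{ key: "Root", children: [{ key: "Pages", children: [] }] }],
    },
  });

  return { pages, tree };
}
```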
+
+ + + diff --git a/src/components/ContentsView.svelte b/src/components/ContentsView.svelte new file mode 100644 index 0000000..61dafc7 --- /dev/null +++ b/src/components/ContentsView.svelte @@ -0,0 +1,36 @@ + +{#if contents} +
+
+ {#each contents.parts as part } +
+ {#each part as line} +
{line}
+ {/each} + +
+ {/each} +
+
+{:else} + {"Loading id: " + id + " Path: " + path} +{/if} + + diff --git a/src/components/DocumentView.svelte b/src/components/DocumentView.svelte new file mode 100644 index 0000000..22ef9ea --- /dev/null +++ b/src/components/DocumentView.svelte @@ -0,0 +1,46 @@ + + + +
+
+

{file.name}

+
COS Path:
+

{path}

+
Location:
+

{file.path}

+
Pages:
+

{file.page_count}

+
Last modified:
+

{file.last_modified}

+
+
+ \ No newline at end of file diff --git a/src/components/FileView.svelte b/src/components/FileView.svelte new file mode 100644 index 0000000..dd56467 --- /dev/null +++ b/src/components/FileView.svelte @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + + + +
+ +
+
+ + + + +
+ \ No newline at end of file diff --git a/src/components/Footer.svelte b/src/components/Footer.svelte new file mode 100644 index 0000000..2074682 --- /dev/null +++ b/src/components/Footer.svelte @@ -0,0 +1,74 @@ + + +
+
+ {#if elements} + {#each elements as path} + + {/each} + {/if} +
+
+ + diff --git a/src/components/PageList.svelte b/src/components/PageList.svelte new file mode 100644 index 0000000..627b277 --- /dev/null +++ b/src/components/PageList.svelte @@ -0,0 +1,82 @@ + + +
+
+
+ + + + + + + +
PageRef
+
+ + + {#each fState.file.pages as page} + handlePageSelect(page)}> + + + + {/each} + +
+
+ +

+ {page.key} +

+
+
{page.id}
+
+
+
+
+ \ No newline at end of file diff --git a/src/components/PrimitiveIcon.svelte b/src/components/PrimitiveIcon.svelte new file mode 100644 index 0000000..a8f7fba --- /dev/null +++ b/src/components/PrimitiveIcon.svelte @@ -0,0 +1,35 @@ + + {#if ptype === "Dictionary"} + + {:else if ptype === "Array"} + + {:else if ptype === "Reference"} + + {:else if ptype === "Integer"} + + {:else if ptype === "Number"} + + {:else if ptype === "Boolean"} + + {:else if ptype === "String"} + + {:else if ptype === "Name"} + + {:else if ptype === "Stream Data"} + + {:else} + + {/if} + \ No newline at end of file diff --git a/src/components/PrimitiveView.svelte b/src/components/PrimitiveView.svelte new file mode 100644 index 0000000..e5976b8 --- /dev/null +++ b/src/components/PrimitiveView.svelte @@ -0,0 +1,136 @@ + + +{#if prim && prim.children && prim.children.length > 0} +
+
+ + + + + + + + +
KeyTypeValue
+
+ + + {#each prim.children as entry} + (selected = entry)} + ondblclick={() => handlePrimSelect(entry)} + > + + + + + {/each} + +
+
+ +

+ {entry.key} +

+
+
{entry.ptype}{entry.value}
+

+ Dict Type: {prim.sub_type} +

+ {#if prim.isPage()} + + {#if showContents} + + {/if} + {/if} +
+
+
+{/if} + + diff --git a/src/components/TabBar.svelte b/src/components/TabBar.svelte new file mode 100644 index 0000000..202b25f --- /dev/null +++ b/src/components/TabBar.svelte @@ -0,0 +1,72 @@ + +
+ {#each files as file} +
+
+ +
+
+ +
+
+ {/each} +
+ +
+
+ \ No newline at end of file diff --git a/src/components/TitleBar.svelte b/src/components/TitleBar.svelte new file mode 100644 index 0000000..429aa0d --- /dev/null +++ b/src/components/TitleBar.svelte @@ -0,0 +1,67 @@ + +
+
+ +
+
+ + + +
+
+ + \ No newline at end of file diff --git a/src/components/ToolbarLeft.svelte b/src/components/ToolbarLeft.svelte new file mode 100644 index 0000000..2c54137 --- /dev/null +++ b/src/components/ToolbarLeft.svelte @@ -0,0 +1,44 @@ + +
+ + +
+ + + \ No newline at end of file diff --git a/src/components/ToolbarRight.svelte b/src/components/ToolbarRight.svelte new file mode 100644 index 0000000..bd62929 --- /dev/null +++ b/src/components/ToolbarRight.svelte @@ -0,0 +1,36 @@ + +
+ + +
+ + + \ No newline at end of file diff --git a/src/components/TreeNode.svelte b/src/components/TreeNode.svelte new file mode 100644 index 0000000..62590fa --- /dev/null +++ b/src/components/TreeNode.svelte @@ -0,0 +1,138 @@ + +{#if prim} +
    + {#each prim.children as child} +
  • +
    + {#if child.children.length > 0} + + {:else if child.isContainer()} + + {:else} + + {/if} + +
    + {#if child.children.length > 0} + + {/if} +
  • + {/each} +
+{/if} + + + \ No newline at end of file diff --git a/src/components/TreeView.svelte b/src/components/TreeView.svelte new file mode 100644 index 0000000..afffea4 --- /dev/null +++ b/src/components/TreeView.svelte @@ -0,0 +1,39 @@ + +
+
+
    + {#if prim} +
  • +
    + + {"Trailer "} +
    + +
  • + {/if} +
+
+
+ \ No newline at end of file diff --git a/src/components/WelcomeScreen.svelte b/src/components/WelcomeScreen.svelte new file mode 100644 index 0000000..dbe330b --- /dev/null +++ b/src/components/WelcomeScreen.svelte @@ -0,0 +1,159 @@ + + +

Welcome to PDF Forge!

+
+ +
+
+ +
+ + + \ No newline at end of file diff --git a/src/components/XRefTable.svelte b/src/components/XRefTable.svelte new file mode 100644 index 0000000..93e0dee --- /dev/null +++ b/src/components/XRefTable.svelte @@ -0,0 +1,134 @@ + +
+
+
+ + + + + + + + + fState.selectXref(undefined)}> + + + + + + +
Obj NrGen NrTypeOffset
Trailer65535DictionaryEnd of file
+
+
+ + + + {#each entriesToDisplay as entry} + fState.selectXref(entry)}> + + + + + + {/each} + +
{entry.obj_num}{entry.gen_num}{entry.obj_type}{entry.offset}
+
+
+
+
+
+ + \ No newline at end of file diff --git a/src/models/ContentModel.svelte.ts b/src/models/ContentModel.svelte.ts new file mode 100644 index 0000000..0e4ad99 --- /dev/null +++ b/src/models/ContentModel.svelte.ts @@ -0,0 +1,3 @@ +export default interface ContentModel { + readonly parts: string[][]; +} \ No newline at end of file diff --git a/src/models/FileViewState.svelte.ts b/src/models/FileViewState.svelte.ts new file mode 100644 index 0000000..efbae32 --- /dev/null +++ b/src/models/FileViewState.svelte.ts @@ -0,0 +1,156 @@ +import type PdfFile from "./PdfFile"; +import type XRefEntry from "./XRefEntry"; +import Primitive from "./Primitive.svelte"; +import {invoke} from "@tauri-apps/api/core"; +import TreeViewNode from "./TreeViewNode.svelte"; +import type XRefTable from "./XRefTable"; + +export default class FileViewState { + + public path: string[] = $state(["/"]); + public treeRoot: TreeViewNode = $state(new TreeViewNode("/", [new TreeViewNode("Root", [])])); + public file: PdfFile; + public prim: Primitive | undefined = $state(); + public treeView: Primitive | undefined = $state(); + public xref_entries: XRefEntry[] = $state([]); + + + constructor(file: PdfFile) { + + this.file = file; + this.selectPath(this.path); + this.loadTreeView(); + this.loadXrefEntries() + } + + getLastJump(): string | number | undefined { + return this.prim?.getLastJump() + } + + public loadXrefEntries() { + invoke("get_xref_table", {id: this.file.id}) + .then(result => { + this.xref_entries = result.entries; + }) + .catch(err => console.error(err)); + } + + public selectPath(newPath: string[]) { + invoke("get_prim_by_path", {id: this.file.id, path: this.mergePaths(newPath)}) + .then(result => { + this.prim = new Primitive(result) + this.path = newPath + }) + .catch(err => console.error(err)); + } + + + public loadTreeView() { + invoke("get_prim_tree_by_path", {id: this.file.id, path: this.treeRoot}) + .then(result => { + this.treeView = new Primitive(result); + } + ).catch(err => console.error(err)) + } + + public getTreeRoot() { + return this.treeRoot; + } + + public expandTree(path: string[]) { + if (path.length == 0) { + console.error("Empty path") + return; + } + let node; + if (path[0] === "/") { + node = this.treeRoot; + } else { + console.error("Invalid Path " + path); + return; + } + for (let key of path.slice(1, path.length)) { + let _node: TreeViewNode | undefined = node.getChild(key) + if (_node) { + node = _node; + } else { + node = node.addChild(key) + } + } + this.loadTreeView(); + } + + public collapseTree(path: string[]) { + if (path.length == 0) { + console.error("Empty path") + return; + } + let node; + if (path[0] === "/") { + node = this.treeRoot; + } else { + console.error("Invalid Path " + path); + return; + } + if (path.length == 1) { + this.treeRoot.clearChildren(); + return; + } + for (let key of path.slice(1, path.length - 1)) { + if (node) { + node = node.getChild(key) + } + } + if (node) { + node.removeChild(path[path.length - 1]); + } + this.loadTreeView() + } + + public getMergedPath() { + return this.mergePaths(this.path); + } + + public displayPath() { + + } + + public popPath() { + let path = this.copyPath(); + if (path.length == 1) { + return + } + path.pop() + this.selectPath(path); + } + + public copyPath() { + const _path: string[] = []; + + for (let item of this.path) { + _path.push(item); + } + return _path; + } + + public selectXref(entry: XRefEntry | undefined) { + if (!entry) { + this.selectPath(["/"]) + return; + } + this.selectPath([entry.obj_num.toString()]); + 
} + + + public mergePaths(paths: string[]) { + if (paths.length == 0) { + return "/"; + } + if (paths[0] === "/") { + return "/" + paths.slice(1, paths.length).join("/") + } + return paths.join("/"); + } + + +} \ No newline at end of file diff --git a/src/models/PageModel.ts b/src/models/PageModel.ts new file mode 100644 index 0000000..2c3f304 --- /dev/null +++ b/src/models/PageModel.ts @@ -0,0 +1,5 @@ + +export default interface PageModel { + key: string, + id: number +} \ No newline at end of file diff --git a/src/models/PdfFile.ts b/src/models/PdfFile.ts new file mode 100644 index 0000000..a0013bf --- /dev/null +++ b/src/models/PdfFile.ts @@ -0,0 +1,11 @@ +import type PageModel from "./PageModel"; + +export default interface PdfFile { + readonly id: string; + readonly name: string; + readonly path: string; + readonly page_count: string; + readonly xref_entries: number; + readonly last_modified: Date; + readonly pages: PageModel[]; +} diff --git a/src/models/Primitive.svelte.ts b/src/models/Primitive.svelte.ts new file mode 100644 index 0000000..0385876 --- /dev/null +++ b/src/models/Primitive.svelte.ts @@ -0,0 +1,44 @@ +export default class Primitive { + public key: string; + public ptype: string; + public sub_type: string; + public value: string; + public children: Primitive[]; + public detail_path: DetailPath[] = $state([]); + + constructor( + p: Primitive + ) { + this.key = p.key; + this.ptype = p.ptype; + this.sub_type = p.sub_type; + this.value = p.value; + this.children = []; + for (let child of p.children) { + this.children.push(new Primitive(child)); + } + this.detail_path = []; + for (let path of p.detail_path) { + this.detail_path.push(path); + } + } + + public isContainer() { + return this.ptype === "Dictionary" || this.ptype === "Array" || this.ptype === "Reference" || this.ptype === "Stream"; + } + + public getLastJump(): string | number { + let path = this.detail_path[this.detail_path.length - 1].last_jump; + if (path === "/") {return path}; + return +path; + } + + public isPage(): boolean { + return this.sub_type === "Page"; + } +} + +export interface DetailPath { + readonly key: string ; + readonly last_jump: string ; +} diff --git a/src/models/ToolBarState.ts b/src/models/ToolBarState.ts new file mode 100644 index 0000000..e917b03 --- /dev/null +++ b/src/models/ToolBarState.ts @@ -0,0 +1,8 @@ +class ToolBarState { + constructor( + public xref: boolean = false, + public tree: boolean = false, + public pages: boolean = false + ) { + } +} \ No newline at end of file diff --git a/src/models/TreeViewNode.svelte.ts b/src/models/TreeViewNode.svelte.ts new file mode 100644 index 0000000..8536c8f --- /dev/null +++ b/src/models/TreeViewNode.svelte.ts @@ -0,0 +1,30 @@ +export default class TreeViewNode { + + public key: string; + public children: TreeViewNode[]; + constructor( + key: string, + children: TreeViewNode[] + ) { + this.key = key; + this.children = children; + } + + public getChild(key: string) { + return this.children.find(child => child.key === key); + } + + public addChild(key: string) { + let child = new TreeViewNode(key, []) + this.children.push(child); + return child; + } + + public clearChildren() { + this.children = []; + } + + public removeChild(key: string) { + this.children = this.children.filter(child => child.key !== key); + } +} \ No newline at end of file diff --git a/src/models/XRefEntry.ts b/src/models/XRefEntry.ts new file mode 100644 index 0000000..64caa90 --- /dev/null +++ b/src/models/XRefEntry.ts @@ -0,0 +1,7 @@ +export default interface XRefEntry 
{ + readonly obj_type: string; + readonly obj_num: number | string; + readonly gen_num: number; + readonly offset: number | string; + readonly size: number; +} \ No newline at end of file diff --git a/src/models/XRefTable.ts b/src/models/XRefTable.ts new file mode 100644 index 0000000..a46ea26 --- /dev/null +++ b/src/models/XRefTable.ts @@ -0,0 +1,6 @@ +import type XRefEntry from "./XRefEntry"; + +export default interface XRefTable { + size: number + entries: XRefEntry[]; +} \ No newline at end of file diff --git a/src/routes/+layout.svelte b/src/routes/+layout.svelte new file mode 100644 index 0000000..1554d78 --- /dev/null +++ b/src/routes/+layout.svelte @@ -0,0 +1,5 @@ + + + + diff --git a/src/routes/+layout.ts b/src/routes/+layout.ts new file mode 100644 index 0000000..f4fb689 --- /dev/null +++ b/src/routes/+layout.ts @@ -0,0 +1,5 @@ +// Tauri doesn't have a Node.js server to do proper SSR +// so we will use adapter-static to prerender the app (SSG) +// See: https://v2.tauri.app/start/frontend/sveltekit/ for more info +export const prerender = true; +export const ssr = false; diff --git a/src/routes/+page.svelte b/src/routes/+page.svelte new file mode 100644 index 0000000..c51b4d3 --- /dev/null +++ b/src/routes/+page.svelte @@ -0,0 +1,5 @@ + + + diff --git a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..1928358 --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,17 @@ +import type {Action} from "@sveltejs/kit"; + +export function arraysAreEqual(arr1: string[], arr2: string[]) { + if (arr1.length !== arr2.length) { + return false; // Arrays of different lengths are not equal + } + for (let i = 0; i < arr1.length; i++) { + if (arr1[i] !== arr2[i]) { + return false; // Mismatched element found + } + } + return true; // All elements match +} + +export function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)); +} diff --git a/static/folder-simple.svg b/static/folder-simple.svg new file mode 100644 index 0000000..b4cdca1 --- /dev/null +++ b/static/folder-simple.svg @@ -0,0 +1,3 @@ + + + diff --git a/static/pdf-forge-logo-25x25.png b/static/pdf-forge-logo-25x25.png new file mode 100644 index 0000000..2101107 Binary files /dev/null and b/static/pdf-forge-logo-25x25.png differ diff --git a/static/pdf-forge-logo-30x30.png b/static/pdf-forge-logo-30x30.png new file mode 100644 index 0000000..64db825 Binary files /dev/null and b/static/pdf-forge-logo-30x30.png differ diff --git a/static/pdf-forge-logo-bg.png b/static/pdf-forge-logo-bg.png new file mode 100644 index 0000000..e68cb45 Binary files /dev/null and b/static/pdf-forge-logo-bg.png differ diff --git a/static/pdf-forge-logo.png b/static/pdf-forge-logo.png new file mode 100644 index 0000000..83b9db1 Binary files /dev/null and b/static/pdf-forge-logo.png differ diff --git a/svelte.config.js b/svelte.config.js new file mode 100644 index 0000000..c29abaa --- /dev/null +++ b/svelte.config.js @@ -0,0 +1,16 @@ +// Tauri doesn't have a Node.js server to do proper SSR +// so we will use adapter-static to prerender the app (SSG) +// See: https://v2.tauri.app/start/frontend/sveltekit/ for more info +import adapter from "@sveltejs/adapter-static"; +import { vitePreprocess } from "@sveltejs/vite-plugin-svelte"; +import {sveltePreprocess} from "svelte-preprocess"; + +/** @type {import('@sveltejs/kit').Config} */ +const config = { + kit: { + adapter: adapter(), + }, + preprocess: vitePreprocess(), +}; + +export default config; diff --git a/tailwind.config.ts b/tailwind.config.ts new file mode 100644 index 
0000000..a9ba797 --- /dev/null +++ b/tailwind.config.ts @@ -0,0 +1,36 @@ +import type { Config } from "tailwindcss"; +import flowbitePlugin from 'flowbite/plugin' +export default { + content: ['./src/**/*.{html,js,svelte,ts}', './node_modules/flowbite-svelte/**/*.{html,js,svelte,ts}'], + darkMode: 'selector', + theme: { + extend: { + colors: { + // flowbite-svelte + primary: { + 50: '#FFF5F2', + 100: '#FFF1EE', + 200: '#FFE4DE', + 300: '#FFD5CC', + 400: '#FFBCAD', + 500: '#FE795D', + 600: '#EF562F', + 700: '#EB4F27', + 800: '#CC4522', + 900: '#A5371B' + }, + forge: { + dark: 'rgb(30,31,34)', + prim: 'rgb(43,45,48)', + sec: 'rgb(76,77,80)', + acc: 'rgb(44, 97, 97)', + active: 'rgb(44, 71, 73)', + bound: 'rgba(0, 0, 0, 0.29)', + text: '#dadada', + text_sec: '#6c6c6c', + } + } + } + }, + plugins: [flowbitePlugin] +} as Config; diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..593dc19 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./.svelte-kit/tsconfig.json", + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "moduleResolution": "bundler" + } + // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias + // except $lib which is handled by https://kit.svelte.dev/docs/configuration#files + // + // If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes + // from the referenced tsconfig.json - TypeScript does not merge them in +} diff --git a/vite.config.js b/vite.config.js new file mode 100644 index 0000000..a2ede43 --- /dev/null +++ b/vite.config.js @@ -0,0 +1,34 @@ +import {defineConfig} from "vite"; +import {sveltekit} from "@sveltejs/kit/vite"; + +// @ts-expect-error process is a nodejs global +const host = process.env.TAURI_DEV_HOST; + +// https://vitejs.dev/config/ +export default defineConfig(async () => ({ + plugins: [sveltekit()], + css: { + postcss: './postcss.config.cjs', // Path to PostCSS config + }, + // Vite options tailored for Tauri development and only applied in `tauri dev` or `tauri build` + // + // 1. prevent vite from obscuring rust errors + clearScreen: false, + // 2. tauri expects a fixed port, fail if that port is not available + server: { + port: 1420, + strictPort: true, + host: host || false, + hmr: host + ? { + protocol: "ws", + host, + port: 1421, + } + : undefined, + watch: { + // 3. tell vite to ignore watching `src-tauri` + ignored: ["**/src-tauri/**"], + }, + }, +})); diff --git a/yarn.lock b/yarn.lock new file mode 100644 index 0000000..7e1bda2 --- /dev/null +++ b/yarn.lock @@ -0,0 +1,1875 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +"@alloc/quick-lru@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@alloc/quick-lru/-/quick-lru-5.2.0.tgz#7bf68b20c0a350f936915fcae06f58e32007ce30" + integrity sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw== + +"@ampproject/remapping@^2.3.0": + version "2.3.0" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.3.0.tgz#ed441b6fa600072520ce18b43d2c8cc8caecc7f4" + integrity sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw== + dependencies: + "@jridgewell/gen-mapping" "^0.3.5" + "@jridgewell/trace-mapping" "^0.3.24" + +"@bufbuild/protobuf@^2.0.0": + version "2.2.3" + resolved "https://registry.yarnpkg.com/@bufbuild/protobuf/-/protobuf-2.2.3.tgz#9cd136f6b687e63e9b517b3a54211ece942897ee" + integrity sha512-tFQoXHJdkEOSwj5tRIZSPNUuXK3RaR7T1nUrPgbYX1pUbvqqaaZAsfo+NXBPsz5rZMSKVFrgK1WL8Q/MSLvprg== + +"@esbuild/aix-ppc64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.24.2.tgz#38848d3e25afe842a7943643cbcd387cc6e13461" + integrity sha512-thpVCb/rhxE/BnMLQ7GReQLLN8q9qbHmI55F4489/ByVg2aQaQ6kbcLb6FHkocZzQhxc4gx0sCk0tJkKBFzDhA== + +"@esbuild/android-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.24.2.tgz#f592957ae8b5643129fa889c79e69cd8669bb894" + integrity sha512-cNLgeqCqV8WxfcTIOeL4OAtSmL8JjcN6m09XIgro1Wi7cF4t/THaWEa7eL5CMoMBdjoHOTh/vwTO/o2TRXIyzg== + +"@esbuild/android-arm@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.24.2.tgz#72d8a2063aa630308af486a7e5cbcd1e134335b3" + integrity sha512-tmwl4hJkCfNHwFB3nBa8z1Uy3ypZpxqxfTQOcHX+xRByyYgunVbZ9MzUUfb0RxaHIMnbHagwAxuTL+tnNM+1/Q== + +"@esbuild/android-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.24.2.tgz#9a7713504d5f04792f33be9c197a882b2d88febb" + integrity sha512-B6Q0YQDqMx9D7rvIcsXfmJfvUYLoP722bgfBlO5cGvNVb5V/+Y7nhBE3mHV9OpxBf4eAS2S68KZztiPaWq4XYw== + +"@esbuild/darwin-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.24.2.tgz#02ae04ad8ebffd6e2ea096181b3366816b2b5936" + integrity sha512-kj3AnYWc+CekmZnS5IPu9D+HWtUI49hbnyqk0FLEJDbzCIQt7hg7ucF1SQAilhtYpIujfaHr6O0UHlzzSPdOeA== + +"@esbuild/darwin-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.24.2.tgz#9ec312bc29c60e1b6cecadc82bd504d8adaa19e9" + integrity sha512-WeSrmwwHaPkNR5H3yYfowhZcbriGqooyu3zI/3GGpF8AyUdsrrP0X6KumITGA9WOyiJavnGZUwPGvxvwfWPHIA== + +"@esbuild/freebsd-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.2.tgz#5e82f44cb4906d6aebf24497d6a068cfc152fa00" + integrity sha512-UN8HXjtJ0k/Mj6a9+5u6+2eZ2ERD7Edt1Q9IZiB5UZAIdPnVKDoG7mdTVGhHJIeEml60JteamR3qhsr1r8gXvg== + +"@esbuild/freebsd-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.24.2.tgz#3fb1ce92f276168b75074b4e51aa0d8141ecce7f" + integrity sha512-TvW7wE/89PYW+IevEJXZ5sF6gJRDY/14hyIGFXdIucxCsbRmLUcjseQu1SyTko+2idmCw94TgyaEZi9HUSOe3Q== + +"@esbuild/linux-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.24.2.tgz#856b632d79eb80aec0864381efd29de8fd0b1f43" + integrity 
sha512-7HnAD6074BW43YvvUmE/35Id9/NB7BeX5EoNkK9obndmZBUk8xmJJeU7DwmUeN7tkysslb2eSl6CTrYz6oEMQg== + +"@esbuild/linux-arm@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.24.2.tgz#c846b4694dc5a75d1444f52257ccc5659021b736" + integrity sha512-n0WRM/gWIdU29J57hJyUdIsk0WarGd6To0s+Y+LwvlC55wt+GT/OgkwoXCXvIue1i1sSNWblHEig00GBWiJgfA== + +"@esbuild/linux-ia32@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.24.2.tgz#f8a16615a78826ccbb6566fab9a9606cfd4a37d5" + integrity sha512-sfv0tGPQhcZOgTKO3oBE9xpHuUqguHvSo4jl+wjnKwFpapx+vUDcawbwPNuBIAYdRAvIDBfZVvXprIj3HA+Ugw== + +"@esbuild/linux-loong64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.24.2.tgz#1c451538c765bf14913512c76ed8a351e18b09fc" + integrity sha512-CN9AZr8kEndGooS35ntToZLTQLHEjtVB5n7dl8ZcTZMonJ7CCfStrYhrzF97eAecqVbVJ7APOEe18RPI4KLhwQ== + +"@esbuild/linux-mips64el@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.24.2.tgz#0846edeefbc3d8d50645c51869cc64401d9239cb" + integrity sha512-iMkk7qr/wl3exJATwkISxI7kTcmHKE+BlymIAbHO8xanq/TjHaaVThFF6ipWzPHryoFsesNQJPE/3wFJw4+huw== + +"@esbuild/linux-ppc64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.24.2.tgz#8e3fc54505671d193337a36dfd4c1a23b8a41412" + integrity sha512-shsVrgCZ57Vr2L8mm39kO5PPIb+843FStGt7sGGoqiiWYconSxwTiuswC1VJZLCjNiMLAMh34jg4VSEQb+iEbw== + +"@esbuild/linux-riscv64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.24.2.tgz#6a1e92096d5e68f7bb10a0d64bb5b6d1daf9a694" + integrity sha512-4eSFWnU9Hhd68fW16GD0TINewo1L6dRrB+oLNNbYyMUAeOD2yCK5KXGK1GH4qD/kT+bTEXjsyTCiJGHPZ3eM9Q== + +"@esbuild/linux-s390x@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.24.2.tgz#ab18e56e66f7a3c49cb97d337cd0a6fea28a8577" + integrity sha512-S0Bh0A53b0YHL2XEXC20bHLuGMOhFDO6GN4b3YjRLK//Ep3ql3erpNcPlEFed93hsQAjAQDNsvcK+hV90FubSw== + +"@esbuild/linux-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.24.2.tgz#8140c9b40da634d380b0b29c837a0b4267aff38f" + integrity sha512-8Qi4nQcCTbLnK9WoMjdC9NiTG6/E38RNICU6sUNqK0QFxCYgoARqVqxdFmWkdonVsvGqWhmm7MO0jyTqLqwj0Q== + +"@esbuild/netbsd-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-arm64/-/netbsd-arm64-0.24.2.tgz#65f19161432bafb3981f5f20a7ff45abb2e708e6" + integrity sha512-wuLK/VztRRpMt9zyHSazyCVdCXlpHkKm34WUyinD2lzK07FAHTq0KQvZZlXikNWkDGoT6x3TD51jKQ7gMVpopw== + +"@esbuild/netbsd-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.24.2.tgz#7a3a97d77abfd11765a72f1c6f9b18f5396bcc40" + integrity sha512-VefFaQUc4FMmJuAxmIHgUmfNiLXY438XrL4GDNV1Y1H/RW3qow68xTwjZKfj/+Plp9NANmzbH5R40Meudu8mmw== + +"@esbuild/openbsd-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.2.tgz#58b00238dd8f123bfff68d3acc53a6ee369af89f" + integrity sha512-YQbi46SBct6iKnszhSvdluqDmxCJA+Pu280Av9WICNwQmMxV7nLRHZfjQzwbPs3jeWnuAhE9Jy0NrnJ12Oz+0A== + +"@esbuild/openbsd-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.24.2.tgz#0ac843fda0feb85a93e288842936c21a00a8a205" + integrity 
sha512-+iDS6zpNM6EnJyWv0bMGLWSWeXGN/HTaF/LXHXHwejGsVi+ooqDfMCCTerNFxEkM3wYVcExkeGXNqshc9iMaOA== + +"@esbuild/sunos-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz#8b7aa895e07828d36c422a4404cc2ecf27fb15c6" + integrity sha512-hTdsW27jcktEvpwNHJU4ZwWFGkz2zRJUz8pvddmXPtXDzVKTTINmlmga3ZzwcuMpUvLw7JkLy9QLKyGpD2Yxig== + +"@esbuild/win32-arm64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.24.2.tgz#c023afb647cabf0c3ed13f0eddfc4f1d61c66a85" + integrity sha512-LihEQ2BBKVFLOC9ZItT9iFprsE9tqjDjnbulhHoFxYQtQfai7qfluVODIYxt1PgdoyQkz23+01rzwNwYfutxUQ== + +"@esbuild/win32-ia32@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.24.2.tgz#96c356132d2dda990098c8b8b951209c3cd743c2" + integrity sha512-q+iGUwfs8tncmFC9pcnD5IvRHAzmbwQ3GPS5/ceCyHdjXubwQWI12MKWSNSMYLJMq23/IUCvJMS76PDqXe1fxA== + +"@esbuild/win32-x64@0.24.2": + version "0.24.2" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.24.2.tgz#34aa0b52d0fbb1a654b596acfa595f0c7b77a77b" + integrity sha512-7VTgWzgMGvup6aSqDPLiW5zHaxYJGTO4OokMjIlrCtf+VpEL+cXKtCvg723iguPYI5oaUNdS+/V7OU2gvXVWEg== + +"@floating-ui/core@^1.6.0": + version "1.6.9" + resolved "https://registry.yarnpkg.com/@floating-ui/core/-/core-1.6.9.tgz#64d1da251433019dafa091de9b2886ff35ec14e6" + integrity sha512-uMXCuQ3BItDUbAMhIXw7UPXRfAlOAvZzdK9BWpE60MCn+Svt3aLn9jsPTi/WNGlRUu2uI0v5S7JiIUsbsvh3fw== + dependencies: + "@floating-ui/utils" "^0.2.9" + +"@floating-ui/dom@^1.6.11": + version "1.6.13" + resolved "https://registry.yarnpkg.com/@floating-ui/dom/-/dom-1.6.13.tgz#a8a938532aea27a95121ec16e667a7cbe8c59e34" + integrity sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w== + dependencies: + "@floating-ui/core" "^1.6.0" + "@floating-ui/utils" "^0.2.9" + +"@floating-ui/utils@^0.2.9": + version "0.2.9" + resolved "https://registry.yarnpkg.com/@floating-ui/utils/-/utils-0.2.9.tgz#50dea3616bc8191fb8e112283b49eaff03e78429" + integrity sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg== + +"@geoffcox/svelte-splitter@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@geoffcox/svelte-splitter/-/svelte-splitter-1.0.1.tgz#07d05fa89d51b9269616463e322ee79148cdbc58" + integrity sha512-iWDNDnuNhsB6tKMGqjXStRhytALlB+/KBQ82T4xSjDndQsrN0CKspdMiu21RUj0AQoDxv3yzAqgp3bwEnF5SCQ== + +"@isaacs/cliui@^8.0.2": + version "8.0.2" + resolved "https://registry.yarnpkg.com/@isaacs/cliui/-/cliui-8.0.2.tgz#b37667b7bc181c168782259bab42474fbf52b550" + integrity sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA== + dependencies: + string-width "^5.1.2" + string-width-cjs "npm:string-width@^4.2.0" + strip-ansi "^7.0.1" + strip-ansi-cjs "npm:strip-ansi@^6.0.1" + wrap-ansi "^8.1.0" + wrap-ansi-cjs "npm:wrap-ansi@^7.0.0" + +"@jridgewell/gen-mapping@^0.3.2", "@jridgewell/gen-mapping@^0.3.5": + version "0.3.8" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz#4f0e06362e01362f823d348f1872b08f666d8142" + integrity sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA== + dependencies: + "@jridgewell/set-array" "^1.2.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.24" + +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.2" + resolved 
"https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz#7a0ee601f60f99a20c7c7c5ff0c80388c1189bd6" + integrity sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw== + +"@jridgewell/set-array@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.2.1.tgz#558fb6472ed16a4c850b889530e6b36438c49280" + integrity sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A== + +"@jridgewell/sourcemap-codec@^1.4.10", "@jridgewell/sourcemap-codec@^1.4.14", "@jridgewell/sourcemap-codec@^1.4.15", "@jridgewell/sourcemap-codec@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" + integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== + +"@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": + version "0.3.25" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" + integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== + dependencies: + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" + +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.3": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + +"@pkgjs/parseargs@^0.11.0": + version "0.11.0" + resolved "https://registry.yarnpkg.com/@pkgjs/parseargs/-/parseargs-0.11.0.tgz#a77ea742fab25775145434eb1d2328cf5013ac33" + integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== + +"@polka/url@^1.0.0-next.24": + version "1.0.0-next.28" + resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.28.tgz#d45e01c4a56f143ee69c54dd6b12eade9e270a73" + integrity sha512-8LduaNlMZGwdZ6qWrKlfa+2M4gahzFkprZiAt2TF8uS0qQgBizKXpXURqvTJ4WtmupWxaLqjRb2UCTe72mu+Aw== + +"@popperjs/core@^2.9.3": + version "2.11.8" + resolved "https://registry.yarnpkg.com/@popperjs/core/-/core-2.11.8.tgz#6b79032e760a0899cd4204710beede972a3a185f" + integrity sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A== + +"@rollup/plugin-node-resolve@^15.2.3": + version "15.3.1" + resolved "https://registry.yarnpkg.com/@rollup/plugin-node-resolve/-/plugin-node-resolve-15.3.1.tgz#66008953c2524be786aa319d49e32f2128296a78" + integrity sha512-tgg6b91pAybXHJQMAAwW9VuWBO6Thi+q7BCNARLwSqlmsHz0XYURtGvh/AuwSADXSI4h/2uHbs7s4FzlZDGSGA== + dependencies: + 
"@rollup/pluginutils" "^5.0.1" + "@types/resolve" "1.20.2" + deepmerge "^4.2.2" + is-module "^1.0.0" + resolve "^1.22.1" + +"@rollup/pluginutils@^5.0.1": + version "5.1.4" + resolved "https://registry.yarnpkg.com/@rollup/pluginutils/-/pluginutils-5.1.4.tgz#bb94f1f9eaaac944da237767cdfee6c5b2262d4a" + integrity sha512-USm05zrsFxYLPdWWq+K3STlWiT/3ELn3RcV5hJMghpeAIhxfsUIg6mt12CBJBInWMV4VneoV7SfGv8xIwo2qNQ== + dependencies: + "@types/estree" "^1.0.0" + estree-walker "^2.0.2" + picomatch "^4.0.2" + +"@rollup/rollup-android-arm-eabi@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.31.0.tgz#d4dd60da0075a6ce9a6c76d71b8204f3e1822285" + integrity sha512-9NrR4033uCbUBRgvLcBrJofa2KY9DzxL2UKZ1/4xA/mnTNyhZCWBuD8X3tPm1n4KxcgaraOYgrFKSgwjASfmlA== + +"@rollup/rollup-android-arm64@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.31.0.tgz#25c4d33259a7a2ccd2f52a5ffcc0bb3ab3f0729d" + integrity sha512-iBbODqT86YBFHajxxF8ebj2hwKm1k8PTBQSojSt3d1FFt1gN+xf4CowE47iN0vOSdnd+5ierMHBbu/rHc7nq5g== + +"@rollup/rollup-darwin-arm64@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.31.0.tgz#d137dff254b19163a6b52ac083a71cd055dae844" + integrity sha512-WHIZfXgVBX30SWuTMhlHPXTyN20AXrLH4TEeH/D0Bolvx9PjgZnn4H677PlSGvU6MKNsjCQJYczkpvBbrBnG6g== + +"@rollup/rollup-darwin-x64@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.31.0.tgz#58ff20b5dacb797d3adca19f02a21c532f9d55bf" + integrity sha512-hrWL7uQacTEF8gdrQAqcDy9xllQ0w0zuL1wk1HV8wKGSGbKPVjVUv/DEwT2+Asabf8Dh/As+IvfdU+H8hhzrQQ== + +"@rollup/rollup-freebsd-arm64@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.31.0.tgz#96ce1a241c591ec3e068f4af765d94eddb24e60c" + integrity sha512-S2oCsZ4hJviG1QjPY1h6sVJLBI6ekBeAEssYKad1soRFv3SocsQCzX6cwnk6fID6UQQACTjeIMB+hyYrFacRew== + +"@rollup/rollup-freebsd-x64@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.31.0.tgz#e59e7ede505be41f0b4311b0b943f8eb44938467" + integrity sha512-pCANqpynRS4Jirn4IKZH4tnm2+2CqCNLKD7gAdEjzdLGbH1iO0zouHz4mxqg0uEMpO030ejJ0aA6e1PJo2xrPA== + +"@rollup/rollup-linux-arm-gnueabihf@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.31.0.tgz#e455ca6e4ff35bd46d62201c153352e717000a7b" + integrity sha512-0O8ViX+QcBd3ZmGlcFTnYXZKGbFu09EhgD27tgTdGnkcYXLat4KIsBBQeKLR2xZDCXdIBAlWLkiXE1+rJpCxFw== + +"@rollup/rollup-linux-arm-musleabihf@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.31.0.tgz#bc1a93d807d19e70b1e343a5bfea43723bcd6327" + integrity sha512-w5IzG0wTVv7B0/SwDnMYmbr2uERQp999q8FMkKG1I+j8hpPX2BYFjWe69xbhbP6J9h2gId/7ogesl9hwblFwwg== + +"@rollup/rollup-linux-arm64-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.31.0.tgz#f38bf843f1dc3d5de680caf31084008846e3efae" + integrity sha512-JyFFshbN5xwy6fulZ8B/8qOqENRmDdEkcIMF0Zz+RsfamEW+Zabl5jAb0IozP/8UKnJ7g2FtZZPEUIAlUSX8cA== + +"@rollup/rollup-linux-arm64-musl@4.31.0": + version "4.31.0" + resolved 
"https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.31.0.tgz#b3987a96c18b7287129cf735be2dbf83e94d9d05" + integrity sha512-kpQXQ0UPFeMPmPYksiBL9WS/BDiQEjRGMfklVIsA0Sng347H8W2iexch+IEwaR7OVSKtr2ZFxggt11zVIlZ25g== + +"@rollup/rollup-linux-loongarch64-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.31.0.tgz#0f0324044e71c4f02e9f49e7ec4e347b655b34ee" + integrity sha512-pMlxLjt60iQTzt9iBb3jZphFIl55a70wexvo8p+vVFK+7ifTRookdoXX3bOsRdmfD+OKnMozKO6XM4zR0sHRrQ== + +"@rollup/rollup-linux-powerpc64le-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.31.0.tgz#809479f27f1fd5b4eecd2aa732132ad952d454ba" + integrity sha512-D7TXT7I/uKEuWiRkEFbed1UUYZwcJDU4vZQdPTcepK7ecPhzKOYk4Er2YR4uHKme4qDeIh6N3XrLfpuM7vzRWQ== + +"@rollup/rollup-linux-riscv64-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.31.0.tgz#7bc75c4f22db04d3c972f83431739cfa41c6a36e" + integrity sha512-wal2Tc8O5lMBtoePLBYRKj2CImUCJ4UNGJlLwspx7QApYny7K1cUYlzQ/4IGQBLmm+y0RS7dwc3TDO/pmcneTw== + +"@rollup/rollup-linux-s390x-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.31.0.tgz#cfe8052345c55864d83ae343362cf1912480170e" + integrity sha512-O1o5EUI0+RRMkK9wiTVpk2tyzXdXefHtRTIjBbmFREmNMy7pFeYXCFGbhKFwISA3UOExlo5GGUuuj3oMKdK6JQ== + +"@rollup/rollup-linux-x64-gnu@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.31.0.tgz#c6b048f1e25f3fea5b4bd246232f4d07a159c5a0" + integrity sha512-zSoHl356vKnNxwOWnLd60ixHNPRBglxpv2g7q0Cd3Pmr561gf0HiAcUBRL3S1vPqRC17Zo2CX/9cPkqTIiai1g== + +"@rollup/rollup-linux-x64-musl@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.31.0.tgz#615273ac52d1a201f4de191cbd3389016a9d7d80" + integrity sha512-ypB/HMtcSGhKUQNiFwqgdclWNRrAYDH8iMYH4etw/ZlGwiTVxBz2tDrGRrPlfZu6QjXwtd+C3Zib5pFqID97ZA== + +"@rollup/rollup-win32-arm64-msvc@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.31.0.tgz#32ed85810c1b831c648eca999d68f01255b30691" + integrity sha512-JuhN2xdI/m8Hr+aVO3vspO7OQfUFO6bKLIRTAy0U15vmWjnZDLrEgCZ2s6+scAYaQVpYSh9tZtRijApw9IXyMw== + +"@rollup/rollup-win32-ia32-msvc@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.31.0.tgz#d47effada68bcbfdccd30c4a788d42e4542ff4d3" + integrity sha512-U1xZZXYkvdf5MIWmftU8wrM5PPXzyaY1nGCI4KI4BFfoZxHamsIe+BtnPLIvvPykvQWlVbqUXdLa4aJUuilwLQ== + +"@rollup/rollup-win32-x64-msvc@4.31.0": + version "4.31.0" + resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.31.0.tgz#7a2d89a82cf0388d60304964217dd7beac6de645" + integrity sha512-ul8rnCsUumNln5YWwz0ted2ZHFhzhRRnkpBZ+YRuHoRAlUji9KChpOUOndY7uykrPEPXVbHLlsdo6v5yXo/TXw== + +"@sveltejs/adapter-static@^3.0.6": + version "3.0.8" + resolved "https://registry.yarnpkg.com/@sveltejs/adapter-static/-/adapter-static-3.0.8.tgz#f23ee99a9678dbaec58b79d183bc3defbfe99f1a" + integrity sha512-YaDrquRpZwfcXbnlDsSrBQNCChVOT9MGuSg+dMAyfsAa1SmiAhrA5jUYUiIMC59G92kIbY/AaQOWcBdq+lh+zg== + +"@sveltejs/kit@^2.9.0": + version "2.16.0" + resolved 
"https://registry.yarnpkg.com/@sveltejs/kit/-/kit-2.16.0.tgz#3961182fdb69eff96a912eb16c1359cf14d37840" + integrity sha512-S9i1ZWKqluzoaJ6riYnEdbe+xJluMTMkhABouBa66GaWcAyCjW/jAc0NdJQJ/DXyK1CnP5quBW25e99MNyvLxA== + dependencies: + "@types/cookie" "^0.6.0" + cookie "^0.6.0" + devalue "^5.1.0" + esm-env "^1.2.2" + import-meta-resolve "^4.1.0" + kleur "^4.1.5" + magic-string "^0.30.5" + mrmime "^2.0.0" + sade "^1.8.1" + set-cookie-parser "^2.6.0" + sirv "^3.0.0" + +"@sveltejs/vite-plugin-svelte-inspector@^4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@sveltejs/vite-plugin-svelte-inspector/-/vite-plugin-svelte-inspector-4.0.1.tgz#2f99a4a593bb910d1492f6c00a042b521c07147e" + integrity sha512-J/Nmb2Q2y7mck2hyCX4ckVHcR5tu2J+MtBEQqpDrrgELZ2uvraQcK/ioCV61AqkdXFgriksOKIceDcQmqnGhVw== + dependencies: + debug "^4.3.7" + +"@sveltejs/vite-plugin-svelte@^5.0.0": + version "5.0.3" + resolved "https://registry.yarnpkg.com/@sveltejs/vite-plugin-svelte/-/vite-plugin-svelte-5.0.3.tgz#50f425c677243e00fda0402c049f28b489c7ab81" + integrity sha512-MCFS6CrQDu1yGwspm4qtli0e63vaPCehf6V7pIMP15AsWgMKrqDGCPFF/0kn4SP0ii4aySu4Pa62+fIRGFMjgw== + dependencies: + "@sveltejs/vite-plugin-svelte-inspector" "^4.0.1" + debug "^4.4.0" + deepmerge "^4.3.1" + kleur "^4.1.5" + magic-string "^0.30.15" + vitefu "^1.0.4" + +"@tailwindcss/typography@^0.5.14": + version "0.5.16" + resolved "https://registry.yarnpkg.com/@tailwindcss/typography/-/typography-0.5.16.tgz#a926c8f44d5c439b2915e231cad80058850047c6" + integrity sha512-0wDLwCVF5V3x3b1SGXPCDcdsbDHMBe+lkFzBRaHeLvNi+nrrnZ1lA18u+OTWO8iSWU2GxUOCvlXtDuqftc1oiA== + dependencies: + lodash.castarray "^4.4.0" + lodash.isplainobject "^4.0.6" + lodash.merge "^4.6.2" + postcss-selector-parser "6.0.10" + +"@tauri-apps/api@^2", "@tauri-apps/api@^2.0.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@tauri-apps/api/-/api-2.2.0.tgz#daaff2515b1a4ff1e763cf84a414548b02b566c5" + integrity sha512-R8epOeZl1eJEl603aUMIGb4RXlhPjpgxbGVEaqY+0G5JG9vzV/clNlzTeqc+NLYXVqXcn8mb4c5b9pJIUDEyAg== + +"@tauri-apps/cli-darwin-arm64@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-arm64/-/cli-darwin-arm64-2.2.5.tgz#6a4265d99120c5464cd9df6deec5aad3f028bf67" + integrity sha512-qdPmypQE7qj62UJy3Wl/ccCJZwsv5gyBByOrAaG7u5c/PB3QSxhNPegice2k4EHeIuApaVJOoe/CEYVgm/og2Q== + +"@tauri-apps/cli-darwin-x64@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-darwin-x64/-/cli-darwin-x64-2.2.5.tgz#0e3f5645da1f9ef080cfab99758a9b67023f0802" + integrity sha512-8JVlCAb2c3n0EcGW7n/1kU4Rq831SsoLDD/0hNp85Um8HGIH2Mg/qos/MLOc8Qv2mOaoKcRKf4hd0I1y0Rl9Cg== + +"@tauri-apps/cli-linux-arm-gnueabihf@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm-gnueabihf/-/cli-linux-arm-gnueabihf-2.2.5.tgz#5d73875a9e084794eb8aa02b6d6967839439941d" + integrity sha512-mzxQCqZg7ljRVgekPpXQ5TOehCNgnXh/DNWU6kFjALaBvaw4fGzc369/hV94wOt29htNFyxf8ty2DaQaYljEHw== + +"@tauri-apps/cli-linux-arm64-gnu@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-gnu/-/cli-linux-arm64-gnu-2.2.5.tgz#8ea9d3e00af8970cda6873d94ba91972e1049234" + integrity sha512-M9nkzx5jsSJSNpp7aSza0qv0/N13SUNzH8ysYSZ7IaCN8coGeMg2KgQ5qC6tqUVij2rbg8A/X1n0pPo/gtLx0A== + +"@tauri-apps/cli-linux-arm64-musl@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.5.tgz#9ca4d6bcacef3b92c576bd2fbb9fa147071c97ad" + integrity 
sha512-tFhZu950HNRLR1RM5Q9Xj5gAlA6AhyyiZgeoXGFAWto+s2jpWmmA3Qq2GUxnVDr7Xui8PF4UY5kANDIOschuwg== + +"@tauri-apps/cli-linux-x64-gnu@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-gnu/-/cli-linux-x64-gnu-2.2.5.tgz#29956ad3b40762c592ff54293928212f0a809572" + integrity sha512-eaGhTQLr3EKeksGsp2tK/Ndi7/oyo3P53Pye6kg0zqXiqu8LQjg1CgvDm1l+5oit04S60zR4AqlDFpoeEtDGgw== + +"@tauri-apps/cli-linux-x64-musl@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.5.tgz#a3c017496db78a8bca6734516d7d66f14313d940" + integrity sha512-NLAO/SymDxeGuOWWQZCpwoED1K1jaHUvW+u9ip+rTetnxFPLvf3zXthx4QVKfCZLdj2WLQz4cLjHyQdMDXAM+w== + +"@tauri-apps/cli-win32-arm64-msvc@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-arm64-msvc/-/cli-win32-arm64-msvc-2.2.5.tgz#60df26f2153eec8dbfb72fdd63945eb04f74b7da" + integrity sha512-yG5KFbqrHfGjkAQAaaCD4i7cJklBjmMxZ2C92DEnqCOujSsEuLxrwwoKxQ4+hqEHOmF3lyX0vfqhgZcS03H38w== + +"@tauri-apps/cli-win32-ia32-msvc@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-ia32-msvc/-/cli-win32-ia32-msvc-2.2.5.tgz#b79ea1b43b703791155d0263543501efc221e655" + integrity sha512-G5lq+2EdxOc8ttg3uhME5t9U3hMGTxwaKz0X4DplTG2Iv4lcNWqw/AESIJVHa5a+EB+ZCC8I+yOfIykp/Cd5mQ== + +"@tauri-apps/cli-win32-x64-msvc@2.2.5": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli-win32-x64-msvc/-/cli-win32-x64-msvc-2.2.5.tgz#97e6a244ca74d8069a242272f1e71c03582b0ed4" + integrity sha512-vw4fPVOo0rIQIlqw6xUvK2nwiRFBHNgayDE2Z/SomJlQJAJ1q4VgpHOPl12ouuicmTjK1gWKm7RTouQe3Nig0Q== + +"@tauri-apps/cli@^2": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/cli/-/cli-2.2.5.tgz#d146f02201c5ab580513db2030de33e9a2a666ce" + integrity sha512-PaefTQUCYYqvZWdH8EhXQkyJEjQwtoy/OHGoPcZx7Gk3D3K6AtGSxZ9OlHIz3Bu5LDGgVBk36vKtHW0WYsWnbw== + optionalDependencies: + "@tauri-apps/cli-darwin-arm64" "2.2.5" + "@tauri-apps/cli-darwin-x64" "2.2.5" + "@tauri-apps/cli-linux-arm-gnueabihf" "2.2.5" + "@tauri-apps/cli-linux-arm64-gnu" "2.2.5" + "@tauri-apps/cli-linux-arm64-musl" "2.2.5" + "@tauri-apps/cli-linux-x64-gnu" "2.2.5" + "@tauri-apps/cli-linux-x64-musl" "2.2.5" + "@tauri-apps/cli-win32-arm64-msvc" "2.2.5" + "@tauri-apps/cli-win32-ia32-msvc" "2.2.5" + "@tauri-apps/cli-win32-x64-msvc" "2.2.5" + +"@tauri-apps/plugin-dialog@~2": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-dialog/-/plugin-dialog-2.2.0.tgz#2f7b841a982820adbc9c182e0e95acd8d90aa6fc" + integrity sha512-6bLkYK68zyK31418AK5fNccCdVuRnNpbxquCl8IqgFByOgWFivbiIlvb79wpSXi0O+8k8RCSsIpOquebusRVSg== + dependencies: + "@tauri-apps/api" "^2.0.0" + +"@tauri-apps/plugin-fs@~2": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-fs/-/plugin-fs-2.2.0.tgz#3cf2968139a9cd9e4d12a7a7176c0b59e446b0a3" + integrity sha512-+08mApuONKI8/sCNEZ6AR8vf5vI9DXD4YfrQ9NQmhRxYKMLVhRW164vdW5BSLmMpuevftpQ2FVoL9EFkfG9Z+g== + dependencies: + "@tauri-apps/api" "^2.0.0" + +"@tauri-apps/plugin-opener@^2": + version "2.2.5" + resolved "https://registry.yarnpkg.com/@tauri-apps/plugin-opener/-/plugin-opener-2.2.5.tgz#928b917d28d3e8b5bafb90f5f91fb0ed20c27fd4" + integrity sha512-hHsJ9RPWpZvZEPVFaL+d25gABMUMOf/A6ESXnvf/ii9guTukj58WXsAE/SOysXRIhej7kseRCxnOnIMpSCdUsQ== + dependencies: + "@tauri-apps/api" "^2.0.0" + +"@types/cookie@^0.6.0": + version "0.6.0" + resolved 
"https://registry.yarnpkg.com/@types/cookie/-/cookie-0.6.0.tgz#eac397f28bf1d6ae0ae081363eca2f425bedf0d5" + integrity sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA== + +"@types/estree@1.0.6", "@types/estree@^1.0.0", "@types/estree@^1.0.5", "@types/estree@^1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== + +"@types/resolve@1.20.2": + version "1.20.2" + resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" + integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q== + +"@yr/monotone-cubic-spline@^1.0.3": + version "1.0.3" + resolved "https://registry.yarnpkg.com/@yr/monotone-cubic-spline/-/monotone-cubic-spline-1.0.3.tgz#7272d89f8e4f6fb7a1600c28c378cc18d3b577b9" + integrity sha512-FQXkOta0XBSUPHndIKON2Y9JeQz5ZeMqLYZVVK93FliNBFm7LNMIZmY6FrMEB9XPcDbE2bekMbZD6kzDkxwYjA== + +acorn-typescript@^1.4.13: + version "1.4.13" + resolved "https://registry.yarnpkg.com/acorn-typescript/-/acorn-typescript-1.4.13.tgz#5f851c8bdda0aa716ffdd5f6ac084df8acc6f5ea" + integrity sha512-xsc9Xv0xlVfwp2o7sQ+GCQ1PgbkdcpWdTzrwXxO3xDMTAywVS3oXVOcOHuRjAPkS4P9b+yc/qNF15460v+jp4Q== + +acorn@^8.12.1: + version "8.14.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" + integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== + +ansi-regex@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== + +ansi-regex@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-6.1.0.tgz#95ec409c69619d6cb1b8b34f14b660ef28ebd654" + integrity sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA== + +ansi-styles@^4.0.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== + dependencies: + color-convert "^2.0.1" + +ansi-styles@^6.1.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-6.2.1.tgz#0e62320cf99c21afff3b3012192546aacbfb05c5" + integrity sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug== + +any-promise@^1.0.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + +anymatch@~3.1.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== + dependencies: + normalize-path "^3.0.0" + picomatch "^2.0.4" + +apexcharts@^3.54.1: + version "3.54.1" + resolved "https://registry.yarnpkg.com/apexcharts/-/apexcharts-3.54.1.tgz#9b78a6ee10e4225d5e04309c8e5b379e2c5c9da2" + integrity 
sha512-E4et0h/J1U3r3EwS/WlqJCQIbepKbp6wGUmaAwJOMjHUP4Ci0gxanLa7FR3okx6p9coi4st6J853/Cb1NP0vpA== + dependencies: + "@yr/monotone-cubic-spline" "^1.0.3" + svg.draggable.js "^2.2.2" + svg.easing.js "^2.0.0" + svg.filter.js "^2.0.2" + svg.pathmorphing.js "^0.1.3" + svg.resize.js "^1.4.3" + svg.select.js "^3.0.1" + +arg@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/arg/-/arg-5.0.2.tgz#c81433cc427c92c4dcf4865142dbca6f15acd59c" + integrity sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg== + +aria-query@^5.3.1: + version "5.3.2" + resolved "https://registry.yarnpkg.com/aria-query/-/aria-query-5.3.2.tgz#93f81a43480e33a338f19163a3d10a50c01dcd59" + integrity sha512-COROpnaoap1E2F000S62r6A60uHZnmlvomhfyT2DlTcrY1OrBKn2UhH7qn5wTC9zMvD0AY7csdPSNwKP+7WiQw== + +autoprefixer@^10.4.20: + version "10.4.20" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.20.tgz#5caec14d43976ef42e32dcb4bd62878e96be5b3b" + integrity sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g== + dependencies: + browserslist "^4.23.3" + caniuse-lite "^1.0.30001646" + fraction.js "^4.3.7" + normalize-range "^0.1.2" + picocolors "^1.0.1" + postcss-value-parser "^4.2.0" + +axobject-query@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/axobject-query/-/axobject-query-4.1.0.tgz#28768c76d0e3cff21bc62a9e2d0b6ac30042a1ee" + integrity sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ== + +balanced-match@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== + +binary-extensions@^2.0.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.3.0.tgz#f6e14a97858d327252200242d4ccfe522c445522" + integrity sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw== + +brace-expansion@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-2.0.1.tgz#1edc459e0f0c548486ecf9fc99f2221364b9a0ae" + integrity sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA== + dependencies: + balanced-match "^1.0.0" + +braces@^3.0.3, braces@~3.0.2: + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== + dependencies: + fill-range "^7.1.1" + +browserslist@^4.23.3: + version "4.24.4" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.4.tgz#c6b2865a3f08bcb860a0e827389003b9fe686e4b" + integrity sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A== + dependencies: + caniuse-lite "^1.0.30001688" + electron-to-chromium "^1.5.73" + node-releases "^2.0.19" + update-browserslist-db "^1.1.1" + +buffer-builder@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/buffer-builder/-/buffer-builder-0.2.0.tgz#3322cd307d8296dab1f604618593b261a3fade8f" + integrity sha512-7VPMEPuYznPSoR21NE1zvd2Xna6c/CloiZCfcMXR1Jny6PjX0N4Nsa38zcBFo/FMK+BlA+FLKbJCQ0i2yxp+Xg== + +camelcase-css@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" + integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== + +caniuse-lite@^1.0.30001646, caniuse-lite@^1.0.30001688: + version "1.0.30001695" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001695.tgz#39dfedd8f94851132795fdf9b79d29659ad9c4d4" + integrity sha512-vHyLade6wTgI2u1ec3WQBxv+2BrTERV28UXQu9LO6lZ9pYeMk34vjXFLOxo1A4UBA8XTL4njRQZdno/yYaSmWw== + +chokidar@^3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.6.0.tgz#197c6cc669ef2a8dc5e7b4d97ee4e092c3eb0d5b" + integrity sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw== + dependencies: + anymatch "~3.1.2" + braces "~3.0.2" + glob-parent "~5.1.2" + is-binary-path "~2.1.0" + is-glob "~4.0.1" + normalize-path "~3.0.0" + readdirp "~3.6.0" + optionalDependencies: + fsevents "~2.3.2" + +chokidar@^4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-4.0.3.tgz#7be37a4c03c9aee1ecfe862a4a23b2c70c205d30" + integrity sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA== + dependencies: + readdirp "^4.0.1" + +clsx@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/clsx/-/clsx-2.1.1.tgz#eed397c9fd8bd882bfb18deab7102049a2f32999" + integrity sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA== + +color-convert@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== + dependencies: + color-name "~1.1.4" + +color-name@~1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== + +colorjs.io@^0.5.0: + version "0.5.2" + resolved "https://registry.yarnpkg.com/colorjs.io/-/colorjs.io-0.5.2.tgz#63b20139b007591ebc3359932bef84628eb3fcef" + integrity sha512-twmVoizEW7ylZSN32OgKdXRmo1qg+wT5/6C3xu5b9QsWzSFAhHLn2xd8ro0diCsKfCj1RdaTP/nrcW+vAoQPIw== + +commander@^4.0.0: + version "4.1.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" + integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== + +cookie@^0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== + +cross-spawn@^7.0.0: + version "7.0.6" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f" + integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + +cssesc@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" + integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== + +debug@^4.3.7, debug@^4.4.0: + version 
"4.4.0" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.4.0.tgz#2b3f2aea2ffeb776477460267377dc8710faba8a" + integrity sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA== + dependencies: + ms "^2.1.3" + +deepmerge@^4.2.2, deepmerge@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + +devalue@^5.1.0: + version "5.1.1" + resolved "https://registry.yarnpkg.com/devalue/-/devalue-5.1.1.tgz#a71887ac0f354652851752654e4bd435a53891ae" + integrity sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw== + +didyoumean@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.2.tgz#989346ffe9e839b4555ecf5666edea0d3e8ad037" + integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== + +dlv@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" + integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== + +eastasianwidth@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" + integrity sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA== + +electron-to-chromium@^1.5.73: + version "1.5.84" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.84.tgz#8e334ca206bb293a20b16418bf454783365b0a95" + integrity sha512-I+DQ8xgafao9Ha6y0qjHHvpZ9OfyA1qKlkHkjywxzniORU2awxyz7f/iVJcULmrF2yrM3nHQf+iDjJtbbexd/g== + +emoji-regex@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== + +emoji-regex@^9.2.2: + version "9.2.2" + resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-9.2.2.tgz#840c8803b0d8047f4ff0cf963176b32d4ef3ed72" + integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== + +esbuild@^0.24.2: + version "0.24.2" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.24.2.tgz#b5b55bee7de017bff5fb8a4e3e44f2ebe2c3567d" + integrity sha512-+9egpBW8I3CD5XPe0n6BfT5fxLzxrlDzqydF3aviG+9ni1lDC/OvMHcxqEFV0+LANZG5R1bFMWfUrjVsdwxJvA== + optionalDependencies: + "@esbuild/aix-ppc64" "0.24.2" + "@esbuild/android-arm" "0.24.2" + "@esbuild/android-arm64" "0.24.2" + "@esbuild/android-x64" "0.24.2" + "@esbuild/darwin-arm64" "0.24.2" + "@esbuild/darwin-x64" "0.24.2" + "@esbuild/freebsd-arm64" "0.24.2" + "@esbuild/freebsd-x64" "0.24.2" + "@esbuild/linux-arm" "0.24.2" + "@esbuild/linux-arm64" "0.24.2" + "@esbuild/linux-ia32" "0.24.2" + "@esbuild/linux-loong64" "0.24.2" + "@esbuild/linux-mips64el" "0.24.2" + "@esbuild/linux-ppc64" "0.24.2" + "@esbuild/linux-riscv64" "0.24.2" + "@esbuild/linux-s390x" "0.24.2" + "@esbuild/linux-x64" "0.24.2" + "@esbuild/netbsd-arm64" "0.24.2" + "@esbuild/netbsd-x64" "0.24.2" + "@esbuild/openbsd-arm64" "0.24.2" + "@esbuild/openbsd-x64" "0.24.2" + "@esbuild/sunos-x64" "0.24.2" + "@esbuild/win32-arm64" "0.24.2" + "@esbuild/win32-ia32" "0.24.2" + "@esbuild/win32-x64" "0.24.2" + 
+escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== + +esm-env-robust@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/esm-env-robust/-/esm-env-robust-0.0.3.tgz#ef95644dde2f2bd750dec4366a43950b3dc3ed50" + integrity sha512-90Gnuw2DALOqlL1581VxP3GHPUNHX9U+fQ+8FNcTTFClhY5gEggAAnJ3q1b2Oq23knRsjv8YpNeMRPaMLUymOA== + dependencies: + esm-env "^1.0.0" + +esm-env@^1.0.0, esm-env@^1.2.1, esm-env@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/esm-env/-/esm-env-1.2.2.tgz#263c9455c55861f41618df31b20cb571fc20b75e" + integrity sha512-Epxrv+Nr/CaL4ZcFGPJIYLWFom+YeV1DqMLHJoEd9SYRxNbaFruBwfEX/kkHUJf55j2+TUbmDcmuilbP1TmXHA== + +esrap@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/esrap/-/esrap-1.4.3.tgz#456ed3c97cf0e6b58b952d351c7c78fe27116576" + integrity sha512-Xddc1RsoFJ4z9nR7W7BFaEPIp4UXoeQ0+077UdWLxbafMQFyU79sQJMk7kxNgRwQ9/aVgaKacCHC2pUACGwmYw== + dependencies: + "@jridgewell/sourcemap-codec" "^1.4.15" + +estree-walker@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-2.0.2.tgz#52f010178c2a4c117a7757cfe942adb7d2da4cac" + integrity sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w== + +fast-glob@^3.3.2: + version "3.3.3" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.3.3.tgz#d06d585ce8dba90a16b0505c543c3ccfb3aeb818" + integrity sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg== + dependencies: + "@nodelib/fs.stat" "^2.0.2" + "@nodelib/fs.walk" "^1.2.3" + glob-parent "^5.1.2" + merge2 "^1.3.0" + micromatch "^4.0.8" + +fastq@^1.6.0: + version "1.18.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.18.0.tgz#d631d7e25faffea81887fe5ea8c9010e1b36fee0" + integrity sha512-QKHXPW0hD8g4UET03SdOdunzSouc9N4AuHdsX8XNcTsuz+yYFILVNIX4l9yHABMhiEI9Db0JTTIpu0wB+Y1QQw== + dependencies: + reusify "^1.0.4" + +fdir@^6.2.0: + version "6.4.3" + resolved "https://registry.yarnpkg.com/fdir/-/fdir-6.4.3.tgz#011cdacf837eca9b811c89dbb902df714273db72" + integrity sha512-PMXmW2y1hDDfTSRc9gaXIuCCRpuoz3Kaz8cUelp3smouvfT632ozg2vrT6lJsHKKOF59YLbOGfAWGUcKEfRMQw== + +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== + dependencies: + to-regex-range "^5.0.1" + +flowbite-datepicker@^1.3.0: + version "1.3.2" + resolved "https://registry.yarnpkg.com/flowbite-datepicker/-/flowbite-datepicker-1.3.2.tgz#ad830d73f923344fb5614978f0d87e790cc69c4b" + integrity sha512-6Nfm0MCVX3mpaR7YSCjmEO2GO8CDt6CX8ZpQnGdeu03WUCWtEPQ/uy0PUiNtIJjJZWnX0Cm3H55MOhbD1g+E/g== + dependencies: + "@rollup/plugin-node-resolve" "^15.2.3" + flowbite "^2.0.0" + +flowbite-svelte-icons@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/flowbite-svelte-icons/-/flowbite-svelte-icons-2.0.2.tgz#6723046e131b4f43c9382a917109388154c42cf5" + integrity sha512-Vkmduy2867Rk8R7TziPirsWkixJnToFBEXRaN4ouJabOx62NQjiBbHFe+HTaMOQmdp4FNMI2Nhtk2I2CQ8r3RQ== + dependencies: + tailwind-merge "^2.5.4" + tailwindcss "^3.4.14" + +flowbite-svelte@^0.47.4: + version "0.47.4" + resolved 
"https://registry.yarnpkg.com/flowbite-svelte/-/flowbite-svelte-0.47.4.tgz#87e54d7818fc97e7e826a95a16092199500eac49" + integrity sha512-8oiY/oeWA7fgkDF91MZKEBo5VmjL8El3wuqTDWAFO1j7p45BHIL6G1VGnnidgCEYlbADDQN9BIGCvyPq4J3g+w== + dependencies: + "@floating-ui/dom" "^1.6.11" + apexcharts "^3.54.1" + flowbite "^2.5.2" + tailwind-merge "^2.5.4" + +flowbite@^2.0.0, flowbite@^2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/flowbite/-/flowbite-2.5.2.tgz#4a14b87ad3f2abd8bcd7b0fb52a6b06fd7a74685" + integrity sha512-kwFD3n8/YW4EG8GlY3Od9IoKND97kitO+/ejISHSqpn3vw2i5K/+ZI8Jm2V+KC4fGdnfi0XZ+TzYqQb4Q1LshA== + dependencies: + "@popperjs/core" "^2.9.3" + flowbite-datepicker "^1.3.0" + mini-svg-data-uri "^1.4.3" + +foreground-child@^3.1.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-3.3.0.tgz#0ac8644c06e431439f8561db8ecf29a7b5519c77" + integrity sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg== + dependencies: + cross-spawn "^7.0.0" + signal-exit "^4.0.1" + +fraction.js@^4.3.7: + version "4.3.7" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" + integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== + +fsevents@~2.3.2, fsevents@~2.3.3: + version "2.3.3" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" + integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw== + +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + +glob-parent@^5.1.2, glob-parent@~5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== + dependencies: + is-glob "^4.0.1" + +glob-parent@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + +glob@^10.3.10: + version "10.4.5" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.5.tgz#f4d9f0b90ffdbab09c9d77f5f29b4262517b0956" + integrity sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + package-json-from-dist "^1.0.0" + path-scurry "^1.11.1" + +has-flag@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== + +hasown@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" + integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== + dependencies: + function-bind "^1.1.2" + +immutable@^5.0.2: + version "5.0.3" + resolved 
"https://registry.yarnpkg.com/immutable/-/immutable-5.0.3.tgz#aa037e2313ea7b5d400cd9298fa14e404c933db1" + integrity sha512-P8IdPQHq3lA1xVeBRi5VPqUm5HDgKnx0Ru51wZz5mjxHr5n3RWhjIpOFU7ybkUxfB+5IToy+OLaHYDBIWsv+uw== + +import-meta-resolve@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz#f9db8bead9fafa61adb811db77a2bf22c5399706" + integrity sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw== + +is-binary-path@~2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== + dependencies: + binary-extensions "^2.0.0" + +is-core-module@^2.16.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.16.1.tgz#2a98801a849f43e2add644fbb6bc6229b19a4ef4" + integrity sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w== + dependencies: + hasown "^2.0.2" + +is-extglob@^2.1.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== + +is-fullwidth-code-point@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== + +is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== + dependencies: + is-extglob "^2.1.1" + +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + integrity sha512-51ypPSPCoTEIN9dy5Oy+h4pShgJmPCygKfyRCISBI+JoWT/2oJvK8QPxmwv7b/p239jXrm9M1mlQbyKJ5A152g== + +is-number@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== + +is-reference@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-reference/-/is-reference-3.0.3.tgz#9ef7bf9029c70a67b2152da4adf57c23d718910f" + integrity sha512-ixkJoqQvAP88E6wLydLGGqCJsrFUnqoH6HnaczB8XmDH1oaWU+xxdptvikTgaEhtZ53Ky6YXiBuUI2WXLMCwjw== + dependencies: + "@types/estree" "^1.0.6" + +isexe@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== + +jackspeak@^3.1.2: + version "3.4.3" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.3.tgz#8833a9d89ab4acde6188942bd1c53b6390ed5a8a" + integrity sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + +jiti@^1.21.6: + version "1.21.7" + resolved 
"https://registry.yarnpkg.com/jiti/-/jiti-1.21.7.tgz#9dd81043424a3d28458b193d965f0d18a2300ba9" + integrity sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A== + +kleur@^4.1.5: + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== + +lilconfig@^3.0.0, lilconfig@^3.1.3: + version "3.1.3" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.3.tgz#a1bcfd6257f9585bf5ae14ceeebb7b559025e4c4" + integrity sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw== + +lines-and-columns@^1.1.6: + version "1.2.4" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" + integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== + +locate-character@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/locate-character/-/locate-character-3.0.0.tgz#0305c5b8744f61028ef5d01f444009e00779f974" + integrity sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA== + +lodash.castarray@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.castarray/-/lodash.castarray-4.4.0.tgz#c02513515e309daddd4c24c60cfddcf5976d9115" + integrity sha512-aVx8ztPv7/2ULbArGJ2Y42bG1mEQ5mGjpdvrbJcJFU3TbYybe+QlLS4pst9zV52ymy2in1KpFPiZnAOATxD4+Q== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + +lru-cache@^10.2.0: + version "10.4.3" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.4.3.tgz#410fc8a17b70e598013df257c2446b7f3383f119" + integrity sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ== + +magic-string@^0.30.11, magic-string@^0.30.15, magic-string@^0.30.5: + version "0.30.17" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.17.tgz#450a449673d2460e5bbcfba9a61916a1714c7453" + integrity sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA== + dependencies: + "@jridgewell/sourcemap-codec" "^1.5.0" + +merge2@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== + +micromatch@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202" + integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA== + dependencies: + braces "^3.0.3" + picomatch "^2.3.1" + +mini-svg-data-uri@^1.4.3: + version "1.4.4" + resolved "https://registry.yarnpkg.com/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz#8ab0aabcdf8c29ad5693ca595af19dd2ead09939" + 
integrity sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg== + +minimatch@^9.0.4: + version "9.0.5" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" + integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== + dependencies: + brace-expansion "^2.0.1" + +"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + +mri@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/mri/-/mri-1.2.0.tgz#6721480fec2a11a4889861115a48b6cbe7cc8f0b" + integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== + +mrmime@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-2.0.0.tgz#151082a6e06e59a9a39b46b3e14d5cfe92b3abb4" + integrity sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw== + +ms@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== + +mz@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/mz/-/mz-2.7.0.tgz#95008057a56cafadc2bc63dde7f9ff6955948e32" + integrity sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q== + dependencies: + any-promise "^1.0.0" + object-assign "^4.0.1" + thenify-all "^1.0.0" + +nanoid@^3.3.8: + version "3.3.8" + resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.8.tgz#b1be3030bee36aaff18bacb375e5cce521684baf" + integrity sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w== + +node-releases@^2.0.19: + version "2.0.19" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.19.tgz#9e445a52950951ec4d177d843af370b411caf314" + integrity sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw== + +normalize-path@^3.0.0, normalize-path@~3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== + +normalize-range@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" + integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== + +object-assign@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== + +object-hash@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-3.0.0.tgz#73f97f753e7baffc0e2cc9d6e079079744ac82e9" + integrity sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw== + +package-json-from-dist@^1.0.0: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz#4f1471a010827a86f94cfd9b0727e36d267de505" + integrity sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw== + +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + +path-parse@^1.0.7: + version "1.0.7" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== + +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + +paths@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/paths/-/paths-0.1.1.tgz#9ad909d7f769dd8acb3a1c033c5eef43123d3d17" + integrity sha512-MO2qBeKTXfH+qlZjo2+R5GmjCo/1r9CIWwtA7JUzafClhzWH9x/nDrJiaTXsWekNrHMlbSSI+Mc4654INuvbrQ== + +picocolors@^1.0.0, picocolors@^1.0.1, picocolors@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== + +picomatch@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-4.0.2.tgz#77c742931e8f3b8820946c76cd0c1f13730d1dab" + integrity sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg== + +pify@^2.3.0: + version "2.3.0" + resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pirates@^4.0.1: + version "4.0.6" + resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.6.tgz#3018ae32ecfcff6c29ba2267cbf21166ac1f36b9" + integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== + +postcss-import@^15.1.0: + version "15.1.0" + resolved "https://registry.yarnpkg.com/postcss-import/-/postcss-import-15.1.0.tgz#41c64ed8cc0e23735a9698b3249ffdbf704adc70" + integrity sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew== + dependencies: + postcss-value-parser "^4.0.0" + read-cache "^1.0.0" + resolve "^1.1.7" + +postcss-js@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-4.0.1.tgz#61598186f3703bab052f1c4f7d805f3991bee9d2" + integrity sha512-dDLF8pEO191hJMtlHFPRa8xsizHaM82MLfNkUHdUtVEV3tgTp5oj+8qbEqYM57SLfc74KSbw//4SeJma2LRVIw== + dependencies: + camelcase-css "^2.0.1" + +postcss-load-config@^4.0.2: + version "4.0.2" + resolved 
"https://registry.yarnpkg.com/postcss-load-config/-/postcss-load-config-4.0.2.tgz#7159dcf626118d33e299f485d6afe4aff7c4a3e3" + integrity sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ== + dependencies: + lilconfig "^3.0.0" + yaml "^2.3.4" + +postcss-nested@^6.2.0: + version "6.2.0" + resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-6.2.0.tgz#4c2d22ab5f20b9cb61e2c5c5915950784d068131" + integrity sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ== + dependencies: + postcss-selector-parser "^6.1.1" + +postcss-selector-parser@6.0.10: + version "6.0.10" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz#79b61e2c0d1bfc2602d549e11d0876256f8df88d" + integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-selector-parser@^6.1.1, postcss-selector-parser@^6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz#27ecb41fb0e3b6ba7a1ec84fff347f734c7929de" + integrity sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-value-parser@^4.0.0, postcss-value-parser@^4.2.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz#723c09920836ba6d3e5af019f92bc0971c02e514" + integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== + +postcss@^8.4.47, postcss@^8.4.49, postcss@^8.5.1: + version "8.5.1" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.5.1.tgz#e2272a1f8a807fafa413218245630b5db10a3214" + integrity sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ== + dependencies: + nanoid "^3.3.8" + picocolors "^1.1.1" + source-map-js "^1.2.1" + +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + +read-cache@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774" + integrity sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA== + dependencies: + pify "^2.3.0" + +readdirp@^4.0.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-4.1.1.tgz#bd115327129672dc47f87408f05df9bd9ca3ef55" + integrity sha512-h80JrZu/MHUZCyHu5ciuoI0+WxsCxzxJTILn6Fs8rxSnFPh+UVHYfeIxK1nVGugMqkfC4vJcBOYbkfkwYK0+gw== + +readdirp@~3.6.0: + version "3.6.0" + resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== + dependencies: + picomatch "^2.2.1" + +resolve@^1.1.7, resolve@^1.22.1, resolve@^1.22.8: + version "1.22.10" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.10.tgz#b663e83ffb09bbf2386944736baae803029b8b39" + integrity sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w== + dependencies: + is-core-module "^2.16.0" + path-parse 
"^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" + +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + +rollup@^4.23.0: + version "4.31.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.31.0.tgz#b84af969a0292cb047dce2c0ec5413a9457597a4" + integrity sha512-9cCE8P4rZLx9+PjoyqHLs31V9a9Vpvfo4qNcs6JCiGWYhw2gijSetFbH6SSy1whnkgcefnUwr8sad7tgqsGvnw== + dependencies: + "@types/estree" "1.0.6" + optionalDependencies: + "@rollup/rollup-android-arm-eabi" "4.31.0" + "@rollup/rollup-android-arm64" "4.31.0" + "@rollup/rollup-darwin-arm64" "4.31.0" + "@rollup/rollup-darwin-x64" "4.31.0" + "@rollup/rollup-freebsd-arm64" "4.31.0" + "@rollup/rollup-freebsd-x64" "4.31.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.31.0" + "@rollup/rollup-linux-arm-musleabihf" "4.31.0" + "@rollup/rollup-linux-arm64-gnu" "4.31.0" + "@rollup/rollup-linux-arm64-musl" "4.31.0" + "@rollup/rollup-linux-loongarch64-gnu" "4.31.0" + "@rollup/rollup-linux-powerpc64le-gnu" "4.31.0" + "@rollup/rollup-linux-riscv64-gnu" "4.31.0" + "@rollup/rollup-linux-s390x-gnu" "4.31.0" + "@rollup/rollup-linux-x64-gnu" "4.31.0" + "@rollup/rollup-linux-x64-musl" "4.31.0" + "@rollup/rollup-win32-arm64-msvc" "4.31.0" + "@rollup/rollup-win32-ia32-msvc" "4.31.0" + "@rollup/rollup-win32-x64-msvc" "4.31.0" + fsevents "~2.3.2" + +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + +rxjs@^7.4.0: + version "7.8.1" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" + integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== + dependencies: + tslib "^2.1.0" + +sade@^1.7.4, sade@^1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/sade/-/sade-1.8.1.tgz#0a78e81d658d394887be57d2a409bf703a3b2701" + integrity sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A== + dependencies: + mri "^1.1.0" + +sass-embedded-android-arm64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-android-arm64/-/sass-embedded-android-arm64-1.83.4.tgz#60af9d787e74276af95e4a1a1507567435bc61d2" + integrity sha512-tgX4FzmbVqnQmD67ZxQDvI+qFNABrboOQgwsG05E5bA/US42zGajW9AxpECJYiMXVOHmg+d81ICbjb0fsVHskw== + +sass-embedded-android-arm@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-android-arm/-/sass-embedded-android-arm-1.83.4.tgz#960953d094bf28c3e10a2e0ebd14459d4ec6e2d2" + integrity sha512-9Z4pJAOgEkXa3VDY/o+U6l5XvV0mZTJcSl0l/mSPHihjAHSpLYnOW6+KOWeM8dxqrsqTYcd6COzhanI/a++5Gw== + +sass-embedded-android-ia32@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-android-ia32/-/sass-embedded-android-ia32-1.83.4.tgz#2293cb9920181094edfa477ba503f1f187d21624" + integrity sha512-RsFOziFqPcfZXdFRULC4Ayzy9aK6R6FwQ411broCjlOBX+b0gurjRadkue3cfUEUR5mmy0KeCbp7zVKPLTK+5Q== + +sass-embedded-android-riscv64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-android-riscv64/-/sass-embedded-android-riscv64-1.83.4.tgz#84f86f2e96955a415343a2f24bae1af7bde26e5f" + integrity 
sha512-EHwh0nmQarBBrMRU928eTZkFGx19k/XW2YwbPR4gBVdWLkbTgCA5aGe8hTE6/1zStyx++3nDGvTZ78+b/VvvLg== + +sass-embedded-android-x64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-android-x64/-/sass-embedded-android-x64-1.83.4.tgz#8db3bb08b941889918f8435a97487cd84e7fd748" + integrity sha512-0PgQNuPWYy1jEOEPDVsV89KfqOsMLIp9CSbjBY7jRcwRhyVAcigqrUG6bDeNtojHUYKA1kU+Eh/85WxOHUOgBw== + +sass-embedded-darwin-arm64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-darwin-arm64/-/sass-embedded-darwin-arm64-1.83.4.tgz#d0f3d82eea999ab0ae7ec8abd7fa364f0defc75e" + integrity sha512-rp2ywymWc3nymnSnAFG5R/8hvxWCsuhK3wOnD10IDlmNB7o4rzKby1c+2ZfpQGowlYGWsWWTgz8FW2qzmZsQRw== + +sass-embedded-darwin-x64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-darwin-x64/-/sass-embedded-darwin-x64-1.83.4.tgz#cd2ac7f209fe823a8a5fc1a064cdfe2833680034" + integrity sha512-kLkN2lXz9PCgGfDS8Ev5YVcl/V2173L6379en/CaFuJJi7WiyPgBymW7hOmfCt4uO4R1y7CP2Uc08DRtZsBlAA== + +sass-embedded-linux-arm64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-arm64/-/sass-embedded-linux-arm64-1.83.4.tgz#057adf6e337357787331d40714cb8bba4a96dafe" + integrity sha512-E0zjsZX2HgESwyqw31EHtI39DKa7RgK7nvIhIRco1d0QEw227WnoR9pjH3M/ZQy4gQj3GKilOFHM5Krs/omeIA== + +sass-embedded-linux-arm@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-arm/-/sass-embedded-linux-arm-1.83.4.tgz#aea8b56f3844633f0bfaf13e0694c79511218fc0" + integrity sha512-nL90ryxX2lNmFucr9jYUyHHx21AoAgdCL1O5Ltx2rKg2xTdytAGHYo2MT5S0LIeKLa/yKP/hjuSvrbICYNDvtA== + +sass-embedded-linux-ia32@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-ia32/-/sass-embedded-linux-ia32-1.83.4.tgz#2cedba9f41be61ded3cede5abd16f8ec163d7f46" + integrity sha512-ew5HpchSzgAYbQoriRh8QhlWn5Kw2nQ2jHoV9YLwGKe3fwwOWA0KDedssvDv7FWnY/FCqXyymhLd6Bxae4Xquw== + +sass-embedded-linux-musl-arm64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-musl-arm64/-/sass-embedded-linux-musl-arm64-1.83.4.tgz#1c5f50c9df93abce7d5ffb4d86eed65b8ffba2f4" + integrity sha512-IzMgalf6MZOxgp4AVCgsaWAFDP/IVWOrgVXxkyhw29fyAEoSWBJH4k87wyPhEtxSuzVHLxKNbc8k3UzdWmlBFg== + +sass-embedded-linux-musl-arm@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-musl-arm/-/sass-embedded-linux-musl-arm-1.83.4.tgz#00f241dbc750ee73242bfde1ec5d64ef2d5d7956" + integrity sha512-0RrJRwMrmm+gG0VOB5b5Cjs7Sd+lhqpQJa6EJNEaZHljJokEfpE5GejZsGMRMIQLxEvVphZnnxl6sonCGFE/QQ== + +sass-embedded-linux-musl-ia32@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-musl-ia32/-/sass-embedded-linux-musl-ia32-1.83.4.tgz#27537a309d39f8e35a7dba34a3edc29a3ee16adf" + integrity sha512-LLb4lYbcxPzX4UaJymYXC+WwokxUlfTJEFUv5VF0OTuSsHAGNRs/rslPtzVBTvMeG9TtlOQDhku1F7G6iaDotA== + +sass-embedded-linux-musl-riscv64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-musl-riscv64/-/sass-embedded-linux-musl-riscv64-1.83.4.tgz#a32edf2ddb7f7d9b526e971e80cadef1e025cce8" + integrity sha512-zoKlPzD5Z13HKin1UGR74QkEy+kZEk2AkGX5RelRG494mi+IWwRuWCppXIovor9+BQb9eDWPYPoMVahwN5F7VA== + +sass-embedded-linux-musl-x64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-musl-x64/-/sass-embedded-linux-musl-x64-1.83.4.tgz#668b90b80bf35830c2f1ea2a47557d5e60842598" + integrity 
sha512-hB8+/PYhfEf2zTIcidO5Bpof9trK6WJjZ4T8g2MrxQh8REVtdPcgIkoxczRynqybf9+fbqbUwzXtiUao2GV+vQ== + +sass-embedded-linux-riscv64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-riscv64/-/sass-embedded-linux-riscv64-1.83.4.tgz#b7718df2adf1cbcb4c26609215018dd2e8bab595" + integrity sha512-83fL4n+oeDJ0Y4KjASmZ9jHS1Vl9ESVQYHMhJE0i4xDi/P3BNarm2rsKljq/QtrwGpbqwn8ujzOu7DsNCMDSHA== + +sass-embedded-linux-x64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-linux-x64/-/sass-embedded-linux-x64-1.83.4.tgz#52e61bd582dfc56b8f638f2b9cfdb8a53db1e57e" + integrity sha512-NlnGdvCmTD5PK+LKXlK3sAuxOgbRIEoZfnHvxd157imCm/s2SYF/R28D0DAAjEViyI8DovIWghgbcqwuertXsA== + +sass-embedded-win32-arm64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-win32-arm64/-/sass-embedded-win32-arm64-1.83.4.tgz#b6ca8f65177e24770e87e43ffea5868fea34de27" + integrity sha512-J2BFKrEaeSrVazU2qTjyQdAk+MvbzJeTuCET0uAJEXSKtvQ3AzxvzndS7LqkDPbF32eXAHLw8GVpwcBwKbB3Uw== + +sass-embedded-win32-ia32@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-win32-ia32/-/sass-embedded-win32-ia32-1.83.4.tgz#94f8da72e253532f8d857516b99e1caf61e7b08f" + integrity sha512-uPAe9T/5sANFhJS5dcfAOhOJy8/l2TRYG4r+UO3Wp4yhqbN7bggPvY9c7zMYS0OC8tU/bCvfYUDFHYMCl91FgA== + +sass-embedded-win32-x64@1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded-win32-x64/-/sass-embedded-win32-x64-1.83.4.tgz#2179d4e2fc2f9086aecd64209a2d84f7d8e9edbe" + integrity sha512-C9fkDY0jKITdJFij4UbfPFswxoXN9O/Dr79v17fJnstVwtUojzVJWKHUXvF0Zg2LIR7TCc4ju3adejKFxj7ueA== + +sass-embedded@^1.83.4: + version "1.83.4" + resolved "https://registry.yarnpkg.com/sass-embedded/-/sass-embedded-1.83.4.tgz#9b05cdc22ae71a1b27b5996a39054ba59bebc04a" + integrity sha512-Hf2burRA/y5PGxsg6jB9UpoK/xZ6g/pgrkOcdl6j+rRg1Zj8XhGKZ1MTysZGtTPUUmiiErqzkP5+Kzp95yv9GQ== + dependencies: + "@bufbuild/protobuf" "^2.0.0" + buffer-builder "^0.2.0" + colorjs.io "^0.5.0" + immutable "^5.0.2" + rxjs "^7.4.0" + supports-color "^8.1.1" + sync-child-process "^1.0.2" + varint "^6.0.0" + optionalDependencies: + sass-embedded-android-arm "1.83.4" + sass-embedded-android-arm64 "1.83.4" + sass-embedded-android-ia32 "1.83.4" + sass-embedded-android-riscv64 "1.83.4" + sass-embedded-android-x64 "1.83.4" + sass-embedded-darwin-arm64 "1.83.4" + sass-embedded-darwin-x64 "1.83.4" + sass-embedded-linux-arm "1.83.4" + sass-embedded-linux-arm64 "1.83.4" + sass-embedded-linux-ia32 "1.83.4" + sass-embedded-linux-musl-arm "1.83.4" + sass-embedded-linux-musl-arm64 "1.83.4" + sass-embedded-linux-musl-ia32 "1.83.4" + sass-embedded-linux-musl-riscv64 "1.83.4" + sass-embedded-linux-musl-x64 "1.83.4" + sass-embedded-linux-riscv64 "1.83.4" + sass-embedded-linux-x64 "1.83.4" + sass-embedded-win32-arm64 "1.83.4" + sass-embedded-win32-ia32 "1.83.4" + sass-embedded-win32-x64 "1.83.4" + +set-cookie-parser@^2.6.0: + version "2.7.1" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz#3016f150072202dfbe90fadee053573cc89d2943" + integrity sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ== + +shebang-command@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + +shebang-regex@^3.0.0: + 
version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + +signal-exit@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-4.1.0.tgz#952188c1cbd546070e2dd20d0f41c0ae0530cb04" + integrity sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw== + +sirv@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/sirv/-/sirv-3.0.0.tgz#f8d90fc528f65dff04cb597a88609d4e8a4361ce" + integrity sha512-BPwJGUeDaDCHihkORDchNyyTvWFhcusy1XMmhEVTQTwGeybFbp8YEmB+njbPnth1FibULBSBVwCQni25XlCUDg== + dependencies: + "@polka/url" "^1.0.0-next.24" + mrmime "^2.0.0" + totalist "^3.0.0" + +source-map-js@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== + +"string-width-cjs@npm:string-width@^4.2.0": + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.1.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^5.0.1, string-width@^5.1.2: + version "5.1.2" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-5.1.2.tgz#14f8daec6d81e7221d2a357e668cab73bdbca794" + integrity sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA== + dependencies: + eastasianwidth "^0.2.0" + emoji-regex "^9.2.2" + strip-ansi "^7.0.1" + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1": + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== + dependencies: + ansi-regex "^5.0.1" + +strip-ansi@^7.0.1: + version "7.1.0" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-7.1.0.tgz#d5b6568ca689d8561370b0707685d22434faff45" + integrity sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ== + dependencies: + ansi-regex "^6.0.1" + +sucrase@^3.35.0: + version "3.35.0" + resolved "https://registry.yarnpkg.com/sucrase/-/sucrase-3.35.0.tgz#57f17a3d7e19b36d8995f06679d121be914ae263" + integrity sha512-8EbVDiu9iN/nESwxeSxDKe0dunta1GOlHufmSSXxMD2z2/tMZpDMpvXQGsc+ajGo8y2uYUmixaSRUc/QPoQ0GA== + dependencies: + "@jridgewell/gen-mapping" "^0.3.2" + commander "^4.0.0" + glob 
"^10.3.10" + lines-and-columns "^1.1.6" + mz "^2.7.0" + pirates "^4.0.1" + ts-interface-checker "^0.1.9" + +supports-color@^8.1.1: + version "8.1.1" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== + dependencies: + has-flag "^4.0.0" + +supports-preserve-symlinks-flag@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== + +svelte-check@^4.0.0: + version "4.1.4" + resolved "https://registry.yarnpkg.com/svelte-check/-/svelte-check-4.1.4.tgz#59ec6f08d23647ec508ff01584ef6d191c77c9e1" + integrity sha512-v0j7yLbT29MezzaQJPEDwksybTE2Ups9rUxEXy92T06TiA0cbqcO8wAOwNUVkFW6B0hsYHA+oAX3BS8b/2oHtw== + dependencies: + "@jridgewell/trace-mapping" "^0.3.25" + chokidar "^4.0.1" + fdir "^6.2.0" + picocolors "^1.0.0" + sade "^1.7.4" + +svelte-preprocess@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/svelte-preprocess/-/svelte-preprocess-6.0.3.tgz#fdc1f9dc41b6f22bf8b1f059e9f21eaaae181eeb" + integrity sha512-PLG2k05qHdhmRG7zR/dyo5qKvakhm8IJ+hD2eFRQmMLHp7X3eJnjeupUtvuRpbNiF31RjVw45W+abDwHEmP5OA== + +svelte-split-pane@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/svelte-split-pane/-/svelte-split-pane-0.1.2.tgz#aae94c583fdcf80e3ceaefc31755f3c49886e6a8" + integrity sha512-JluEydC9v2DetyhlwCF3CdqAkET8XPHP7WeWbl4lVLOg55avDOhoS5U6BRhvWd104HOqhUcCSz+7Nveyjmzjeg== + +svelte-splitpanes@^8.0.9: + version "8.0.9" + resolved "https://registry.yarnpkg.com/svelte-splitpanes/-/svelte-splitpanes-8.0.9.tgz#4103765febb58f847db8ef115e0e1b91462fc70b" + integrity sha512-L3oLXTC99M191FInTXJ/f/2i0welRql1QuVbPaU8iy6nvCR6X9VyjHCsCpLqKGWHwqkWo/AM9CQ1c0nzlb+MkA== + dependencies: + esm-env-robust "0.0.3" + +svelte@^5.0.0: + version "5.19.0" + resolved "https://registry.yarnpkg.com/svelte/-/svelte-5.19.0.tgz#bfe63f5fc794c5bbd2240435b9050fff09680833" + integrity sha512-qvd2GvvYnJxS/MteQKFSMyq8cQrAAut28QZ39ySv9k3ggmhw4Au4Rfcsqva74i0xMys//OhbhVCNfXPrDzL/Bg== + dependencies: + "@ampproject/remapping" "^2.3.0" + "@jridgewell/sourcemap-codec" "^1.5.0" + "@types/estree" "^1.0.5" + acorn "^8.12.1" + acorn-typescript "^1.4.13" + aria-query "^5.3.1" + axobject-query "^4.1.0" + clsx "^2.1.1" + esm-env "^1.2.1" + esrap "^1.4.3" + is-reference "^3.0.3" + locate-character "^3.0.0" + magic-string "^0.30.11" + zimmerframe "^1.1.2" + +svg.draggable.js@^2.2.2: + version "2.2.2" + resolved "https://registry.yarnpkg.com/svg.draggable.js/-/svg.draggable.js-2.2.2.tgz#c514a2f1405efb6f0263e7958f5b68fce50603ba" + integrity sha512-JzNHBc2fLQMzYCZ90KZHN2ohXL0BQJGQimK1kGk6AvSeibuKcIdDX9Kr0dT9+UJ5O8nYA0RB839Lhvk4CY4MZw== + dependencies: + svg.js "^2.0.1" + +svg.easing.js@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/svg.easing.js/-/svg.easing.js-2.0.0.tgz#8aa9946b0a8e27857a5c40a10eba4091e5691f12" + integrity sha512-//ctPdJMGy22YoYGV+3HEfHbm6/69LJUTAqI2/5qBvaNHZ9uUFVC82B0Pl299HzgH13rKrBgi4+XyXXyVWWthA== + dependencies: + svg.js ">=2.3.x" + +svg.filter.js@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/svg.filter.js/-/svg.filter.js-2.0.2.tgz#91008e151389dd9230779fcbe6e2c9a362d1c203" + integrity 
sha512-xkGBwU+dKBzqg5PtilaTb0EYPqPfJ9Q6saVldX+5vCRy31P6TlRCP3U9NxH3HEufkKkpNgdTLBJnmhDHeTqAkw== + dependencies: + svg.js "^2.2.5" + +svg.js@>=2.3.x, svg.js@^2.0.1, svg.js@^2.2.5, svg.js@^2.4.0, svg.js@^2.6.5: + version "2.7.1" + resolved "https://registry.yarnpkg.com/svg.js/-/svg.js-2.7.1.tgz#eb977ed4737001eab859949b4a398ee1bb79948d" + integrity sha512-ycbxpizEQktk3FYvn/8BH+6/EuWXg7ZpQREJvgacqn46gIddG24tNNe4Son6omdXCnSOaApnpZw6MPCBA1dODA== + +svg.pathmorphing.js@^0.1.3: + version "0.1.3" + resolved "https://registry.yarnpkg.com/svg.pathmorphing.js/-/svg.pathmorphing.js-0.1.3.tgz#c25718a1cc7c36e852ecabc380e758ac09bb2b65" + integrity sha512-49HWI9X4XQR/JG1qXkSDV8xViuTLIWm/B/7YuQELV5KMOPtXjiwH4XPJvr/ghEDibmLQ9Oc22dpWpG0vUDDNww== + dependencies: + svg.js "^2.4.0" + +svg.resize.js@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/svg.resize.js/-/svg.resize.js-1.4.3.tgz#885abd248e0cd205b36b973c4b578b9a36f23332" + integrity sha512-9k5sXJuPKp+mVzXNvxz7U0uC9oVMQrrf7cFsETznzUDDm0x8+77dtZkWdMfRlmbkEEYvUn9btKuZ3n41oNA+uw== + dependencies: + svg.js "^2.6.5" + svg.select.js "^2.1.2" + +svg.select.js@^2.1.2: + version "2.1.2" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-2.1.2.tgz#e41ce13b1acff43a7441f9f8be87a2319c87be73" + integrity sha512-tH6ABEyJsAOVAhwcCjF8mw4crjXSI1aa7j2VQR8ZuJ37H2MBUbyeqYr5nEO7sSN3cy9AR9DUwNg0t/962HlDbQ== + dependencies: + svg.js "^2.2.5" + +svg.select.js@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/svg.select.js/-/svg.select.js-3.0.1.tgz#a4198e359f3825739226415f82176a90ea5cc917" + integrity sha512-h5IS/hKkuVCbKSieR9uQCj9w+zLHoPh+ce19bBYyqF53g6mnPB8sAtIbe1s9dh2S2fCmYX2xel1Ln3PJBbK4kw== + dependencies: + svg.js "^2.6.5" + +sync-child-process@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/sync-child-process/-/sync-child-process-1.0.2.tgz#45e7c72e756d1243e80b547ea2e17957ab9e367f" + integrity sha512-8lD+t2KrrScJ/7KXCSyfhT3/hRq78rC0wBFqNJXv3mZyn6hW2ypM05JmlSvtqRbeq6jqA94oHbxAr2vYsJ8vDA== + dependencies: + sync-message-port "^1.0.0" + +sync-message-port@^1.0.0: + version "1.1.3" + resolved "https://registry.yarnpkg.com/sync-message-port/-/sync-message-port-1.1.3.tgz#6055c565ee8c81d2f9ee5aae7db757e6d9088c0c" + integrity sha512-GTt8rSKje5FilG+wEdfCkOcLL7LWqpMlr2c3LRuKt/YXxcJ52aGSbGBAdI4L3aaqfrBt6y711El53ItyH1NWzg== + +tailwind-merge@^2.5.4: + version "2.6.0" + resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.6.0.tgz#ac5fb7e227910c038d458f396b7400d93a3142d5" + integrity sha512-P+Vu1qXfzediirmHOC3xKGAYeZtPcV9g76X+xg2FD4tYgR71ewMA35Y3sCz3zhiN/dwefRpJX0yBcgwi1fXNQA== + +tailwindcss@^3.4.14, tailwindcss@^3.4.17: + version "3.4.17" + resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-3.4.17.tgz#ae8406c0f96696a631c790768ff319d46d5e5a63" + integrity sha512-w33E2aCvSDP0tW9RZuNXadXlkHXqFzSkQew/aIa2i/Sj8fThxwovwlXHSPXTbAHwEIhBFXAedUhP2tueAKP8Og== + dependencies: + "@alloc/quick-lru" "^5.2.0" + arg "^5.0.2" + chokidar "^3.6.0" + didyoumean "^1.2.2" + dlv "^1.1.3" + fast-glob "^3.3.2" + glob-parent "^6.0.2" + is-glob "^4.0.3" + jiti "^1.21.6" + lilconfig "^3.1.3" + micromatch "^4.0.8" + normalize-path "^3.0.0" + object-hash "^3.0.0" + picocolors "^1.1.1" + postcss "^8.4.47" + postcss-import "^15.1.0" + postcss-js "^4.0.1" + postcss-load-config "^4.0.2" + postcss-nested "^6.2.0" + postcss-selector-parser "^6.1.2" + resolve "^1.22.8" + sucrase "^3.35.0" + +thenify-all@^1.0.0: + version "1.6.0" + resolved 
"https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" + integrity sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA== + dependencies: + thenify ">= 3.1.0 < 4" + +"thenify@>= 3.1.0 < 4": + version "3.3.1" + resolved "https://registry.yarnpkg.com/thenify/-/thenify-3.3.1.tgz#8932e686a4066038a016dd9e2ca46add9838a95f" + integrity sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw== + dependencies: + any-promise "^1.0.0" + +to-regex-range@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== + dependencies: + is-number "^7.0.0" + +totalist@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" + integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ== + +ts-interface-checker@^0.1.9: + version "0.1.13" + resolved "https://registry.yarnpkg.com/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz#784fd3d679722bc103b1b4b8030bcddb5db2a699" + integrity sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== + +tslib@^2.1.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + +typescript@~5.6.2: + version "5.6.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.3.tgz#5f3449e31c9d94febb17de03cc081dd56d81db5b" + integrity sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw== + +update-browserslist-db@^1.1.1: + version "1.1.2" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.2.tgz#97e9c96ab0ae7bcac08e9ae5151d26e6bc6b5580" + integrity sha512-PPypAm5qvlD7XMZC3BujecnaOxwhrtoFR+Dqkk5Aa/6DssiH0ibKoketaj9w8LP7Bont1rYeoV5plxD7RTEPRg== + dependencies: + escalade "^3.2.0" + picocolors "^1.1.1" + +util-deprecate@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== + +varint@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/varint/-/varint-6.0.0.tgz#9881eb0ce8feaea6512439d19ddf84bf551661d0" + integrity sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg== + +vite@^6.0.3: + version "6.0.9" + resolved "https://registry.yarnpkg.com/vite/-/vite-6.0.9.tgz#0a830b767ef7aa762360b56bdef955c1395dc1ee" + integrity sha512-MSgUxHcaXLtnBPktkbUSoQUANApKYuxZ6DrbVENlIorbhL2dZydTLaZ01tjUoE3szeFzlFk9ANOKk0xurh4MKA== + dependencies: + esbuild "^0.24.2" + postcss "^8.4.49" + rollup "^4.23.0" + optionalDependencies: + fsevents "~2.3.3" + +vitefu@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/vitefu/-/vitefu-1.0.5.tgz#eab501e07da167bbb68e957685823e6b425e7ce2" + integrity sha512-h4Vflt9gxODPFNGPwp4zAMZRpZR7eslzwH2c5hn5kNZ5rhnKyRJ50U+yGCdc2IRaBs8O4haIgLNGrV5CrpMsCA== + +which@^2.0.1: + version "2.0.2" + resolved 
"https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + +wrap-ansi@^8.1.0: + version "8.1.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214" + integrity sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ== + dependencies: + ansi-styles "^6.1.0" + string-width "^5.0.1" + strip-ansi "^7.0.1" + +yaml@^2.3.4: + version "2.7.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.7.0.tgz#aef9bb617a64c937a9a748803786ad8d3ffe1e98" + integrity sha512-+hSoy/QHluxmC9kCIJyL/uyFmLmc+e5CFR5Wa+bpIhIj85LVb9ZH2nVnqrHoSvKogwODv0ClqZkmiSSaIH5LTA== + +zimmerframe@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/zimmerframe/-/zimmerframe-1.1.2.tgz#5b75f1fa83b07ae2a428d51e50f58e2ae6855e5e" + integrity sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==