Compare commits
8 Commits
80ddafc4e6 ... master

| Author | SHA1 | Date |
|---|---|---|
| | 243f107ef5 | |
| | 9ad4e7972b | |
| | 6eec0890a7 | |
| | 8654d160b6 | |
| | 4452508dd4 | |
| | 1697523000 | |
| | eee4e6523e | |
| | 150992aaac | |
.clinerules/lib.md | 10 (new file)

@@ -0,0 +1,10 @@

# Library Functions

## PNG Metadata

Use this file `src/lib/util.ts` for embedding and reading JSON data from PNG files.

### Embed JSON to PNG

Use this method `embedJsonToPng(path, obj)`

### Read JSON from PNG

Use this method `readJsonToPng(path)`
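The `src/lib/util.ts` implementation itself is not part of this compare view, so the two helpers are only named here. As a rough illustration of how they could be built on the `png-chunks-extract`, `png-chunks-encode`, and `png-chunk-text` packages added to `package.json` below, here is a minimal sketch; the tEXt keyword and all implementation details are assumptions, not code from the repository:

```ts
// Hypothetical sketch of src/lib/util.ts -- the actual implementation is not shown in this diff.
import * as fs from 'fs-extra';
import extract from 'png-chunks-extract';
import encode from 'png-chunks-encode';
import * as text from 'png-chunk-text';

const KEYWORD = 'json'; // assumed tEXt keyword for the embedded payload

// Embed a JSON-serializable object into a PNG as a tEXt chunk, writing the file back in place.
export async function embedJsonToPng(path: string, obj: unknown): Promise<void> {
  const chunks = extract(await fs.readFile(path));
  // Insert the tEXt chunk just before the trailing IEND chunk.
  chunks.splice(-1, 0, text.encode(KEYWORD, JSON.stringify(obj)));
  await fs.writeFile(path, Buffer.from(encode(chunks)));
}

// Read the embedded JSON back out; returns null if no matching tEXt chunk exists.
export async function readJsonToPng(path: string): Promise<unknown | null> {
  const found = extract(await fs.readFile(path))
    .filter((c) => c.name === 'tEXt')
    .map((c) => text.decode(c.data))
    .find((t) => t.keyword === KEYWORD);
  return found ? JSON.parse(found.text) : null;
}
```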
package-lock.json | 618 (generated)

@@ -10,15 +10,27 @@
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@types/axios": "^0.14.4",
|
||||
"@types/fs-extra": "^11.0.4",
|
||||
"@types/pngjs": "^6.0.5",
|
||||
"@types/sharp": "^0.32.0",
|
||||
"axios": "^1.11.0",
|
||||
"dotenv": "^17.2.1",
|
||||
"fs-extra": "^11.3.2",
|
||||
"mysql2": "^3.14.3",
|
||||
"open": "^10.2.0",
|
||||
"png-chunk-text": "^1.0.0",
|
||||
"png-chunks-encode": "^1.0.0",
|
||||
"png-chunks-extract": "^1.0.0",
|
||||
"pngjs": "^7.0.0",
|
||||
"puppeteer": "^24.16.2",
|
||||
"sharp": "^0.34.4",
|
||||
"uuid": "^11.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.0.0",
|
||||
"@types/node": "^20.19.19",
|
||||
"@types/png-chunk-text": "^1.0.3",
|
||||
"@types/png-chunks-encode": "^1.0.2",
|
||||
"@types/png-chunks-extract": "^1.0.2",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.0.0"
|
||||
}
|
||||
@ -56,6 +68,419 @@
|
||||
"node": ">=12"
|
||||
}
|
||||
},
|
||||
"node_modules/@emnapi/runtime": {
|
||||
"version": "1.5.0",
|
||||
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.5.0.tgz",
|
||||
"integrity": "sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==",
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"tslib": "^2.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/colour": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/@img/colour/-/colour-1.0.0.tgz",
|
||||
"integrity": "sha512-A5P/LfWGFSl6nsckYtjw9da+19jB8hkJ6ACTGcDfEJ0aE+l2n2El7dsVM7UVHZQ9s2lmYMWlrS21YLy2IR1LUw==",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-darwin-arm64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-arm64/-/sharp-darwin-arm64-0.34.4.tgz",
|
||||
"integrity": "sha512-sitdlPzDVyvmINUdJle3TNHl+AG9QcwiAMsXmccqsCOMZNIdW2/7S26w0LyU8euiLVzFBL3dXPwVCq/ODnf2vA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-darwin-arm64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-darwin-x64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-darwin-x64/-/sharp-darwin-x64-0.34.4.tgz",
|
||||
"integrity": "sha512-rZheupWIoa3+SOdF/IcUe1ah4ZDpKBGWcsPX6MT0lYniH9micvIU7HQkYTfrx5Xi8u+YqwLtxC/3vl8TQN6rMg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-darwin-x64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-darwin-arm64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-arm64/-/sharp-libvips-darwin-arm64-1.2.3.tgz",
|
||||
"integrity": "sha512-QzWAKo7kpHxbuHqUC28DZ9pIKpSi2ts2OJnoIGI26+HMgq92ZZ4vk8iJd4XsxN+tYfNJxzH6W62X5eTcsBymHw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-darwin-x64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-darwin-x64/-/sharp-libvips-darwin-x64-1.2.3.tgz",
|
||||
"integrity": "sha512-Ju+g2xn1E2AKO6YBhxjj+ACcsPQRHT0bhpglxcEf+3uyPY+/gL8veniKoo96335ZaPo03bdDXMv0t+BBFAbmRA==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-arm": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm/-/sharp-libvips-linux-arm-1.2.3.tgz",
|
||||
"integrity": "sha512-x1uE93lyP6wEwGvgAIV0gP6zmaL/a0tGzJs/BIDDG0zeBhMnuUPm7ptxGhUbcGs4okDJrk4nxgrmxpib9g6HpA==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-arm64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-arm64/-/sharp-libvips-linux-arm64-1.2.3.tgz",
|
||||
"integrity": "sha512-I4RxkXU90cpufazhGPyVujYwfIm9Nk1QDEmiIsaPwdnm013F7RIceaCc87kAH+oUB1ezqEvC6ga4m7MSlqsJvQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-ppc64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-ppc64/-/sharp-libvips-linux-ppc64-1.2.3.tgz",
|
||||
"integrity": "sha512-Y2T7IsQvJLMCBM+pmPbM3bKT/yYJvVtLJGfCs4Sp95SjvnFIjynbjzsa7dY1fRJX45FTSfDksbTp6AGWudiyCg==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-s390x": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-s390x/-/sharp-libvips-linux-s390x-1.2.3.tgz",
|
||||
"integrity": "sha512-RgWrs/gVU7f+K7P+KeHFaBAJlNkD1nIZuVXdQv6S+fNA6syCcoboNjsV2Pou7zNlVdNQoQUpQTk8SWDHUA3y/w==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linux-x64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linux-x64/-/sharp-libvips-linux-x64-1.2.3.tgz",
|
||||
"integrity": "sha512-3JU7LmR85K6bBiRzSUc/Ff9JBVIFVvq6bomKE0e63UXGeRw2HPVEjoJke1Yx+iU4rL7/7kUjES4dZ/81Qjhyxg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linuxmusl-arm64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-arm64/-/sharp-libvips-linuxmusl-arm64-1.2.3.tgz",
|
||||
"integrity": "sha512-F9q83RZ8yaCwENw1GieztSfj5msz7GGykG/BA+MOUefvER69K/ubgFHNeSyUu64amHIYKGDs4sRCMzXVj8sEyw==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-libvips-linuxmusl-x64": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-libvips-linuxmusl-x64/-/sharp-libvips-linuxmusl-x64-1.2.3.tgz",
|
||||
"integrity": "sha512-U5PUY5jbc45ANM6tSJpsgqmBF/VsL6LnxJmIf11kB7J5DctHgqm0SkuXzVWtIY90GnJxKnC/JT251TDnk1fu/g==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-arm": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm/-/sharp-linux-arm-0.34.4.tgz",
|
||||
"integrity": "sha512-Xyam4mlqM0KkTHYVSuc6wXRmM7LGN0P12li03jAnZ3EJWZqj83+hi8Y9UxZUbxsgsK1qOEwg7O0Bc0LjqQVtxA==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-arm": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-arm64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-arm64/-/sharp-linux-arm64-0.34.4.tgz",
|
||||
"integrity": "sha512-YXU1F/mN/Wu786tl72CyJjP/Ngl8mGHN1hST4BGl+hiW5jhCnV2uRVTNOcaYPs73NeT/H8Upm3y9582JVuZHrQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-arm64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-ppc64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-ppc64/-/sharp-linux-ppc64-0.34.4.tgz",
|
||||
"integrity": "sha512-F4PDtF4Cy8L8hXA2p3TO6s4aDt93v+LKmpcYFLAVdkkD3hSxZzee0rh6/+94FpAynsuMpLX5h+LRsSG3rIciUQ==",
|
||||
"cpu": [
|
||||
"ppc64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-ppc64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-s390x": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-s390x/-/sharp-linux-s390x-0.34.4.tgz",
|
||||
"integrity": "sha512-qVrZKE9Bsnzy+myf7lFKvng6bQzhNUAYcVORq2P7bDlvmF6u2sCmK2KyEQEBdYk+u3T01pVsPrkj943T1aJAsw==",
|
||||
"cpu": [
|
||||
"s390x"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-s390x": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linux-x64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linux-x64/-/sharp-linux-x64-0.34.4.tgz",
|
||||
"integrity": "sha512-ZfGtcp2xS51iG79c6Vhw9CWqQC8l2Ot8dygxoDoIQPTat/Ov3qAa8qpxSrtAEAJW+UjTXc4yxCjNfxm4h6Xm2A==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linux-x64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linuxmusl-arm64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-arm64/-/sharp-linuxmusl-arm64-0.34.4.tgz",
|
||||
"integrity": "sha512-8hDVvW9eu4yHWnjaOOR8kHVrew1iIX+MUgwxSuH2XyYeNRtLUe4VNioSqbNkB7ZYQJj9rUTT4PyRscyk2PXFKA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linuxmusl-arm64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-linuxmusl-x64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-linuxmusl-x64/-/sharp-linuxmusl-x64-0.34.4.tgz",
|
||||
"integrity": "sha512-lU0aA5L8QTlfKjpDCEFOZsTYGn3AEiO6db8W5aQDxj0nQkVrZWmN3ZP9sYKWJdtq3PWPhUNlqehWyXpYDcI9Sg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-libvips-linuxmusl-x64": "1.2.3"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-wasm32": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-wasm32/-/sharp-wasm32-0.34.4.tgz",
|
||||
"integrity": "sha512-33QL6ZO/qpRyG7woB/HUALz28WnTMI2W1jgX3Nu2bypqLIKx/QKMILLJzJjI+SIbvXdG9fUnmrxR7vbi1sTBeA==",
|
||||
"cpu": [
|
||||
"wasm32"
|
||||
],
|
||||
"optional": true,
|
||||
"dependencies": {
|
||||
"@emnapi/runtime": "^1.5.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-win32-arm64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-win32-arm64/-/sharp-win32-arm64-0.34.4.tgz",
|
||||
"integrity": "sha512-2Q250do/5WXTwxW3zjsEuMSv5sUU4Tq9VThWKlU2EYLm4MB7ZeMwF+SFJutldYODXF6jzc6YEOC+VfX0SZQPqA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-win32-ia32": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-win32-ia32/-/sharp-win32-ia32-0.34.4.tgz",
|
||||
"integrity": "sha512-3ZeLue5V82dT92CNL6rsal6I2weKw1cYu+rGKm8fOCCtJTR2gYeUfY3FqUnIJsMUPIH68oS5jmZ0NiJ508YpEw==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@img/sharp-win32-x64": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/@img/sharp-win32-x64/-/sharp-win32-x64-0.34.4.tgz",
|
||||
"integrity": "sha512-xIyj4wpYs8J18sVN3mSQjwrw7fKUqRw+Z5rnHNCy5fYTxigBz81u5mOMPmFumwjcn8+ld1ppptMBCLic1nz6ig==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
}
|
||||
},
|
||||
"node_modules/@jridgewell/resolve-uri": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
||||
@ -139,15 +564,66 @@
|
||||
"axios": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/fs-extra": {
|
||||
"version": "11.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/fs-extra/-/fs-extra-11.0.4.tgz",
|
||||
"integrity": "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==",
|
||||
"dependencies": {
|
||||
"@types/jsonfile": "*",
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/jsonfile": {
|
||||
"version": "6.1.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/jsonfile/-/jsonfile-6.1.4.tgz",
|
||||
"integrity": "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.19.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.11.tgz",
|
||||
"integrity": "sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow==",
|
||||
"devOptional": true,
|
||||
"version": "20.19.19",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.19.tgz",
|
||||
"integrity": "sha512-pb1Uqj5WJP7wrcbLU7Ru4QtA0+3kAXrkutGiD26wUKzSMgNNaPARTUDQmElUXp64kh3cWdou3Q0C7qwwxqSFmg==",
|
||||
"dependencies": {
|
||||
"undici-types": "~6.21.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/png-chunk-text": {
|
||||
"version": "1.0.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/png-chunk-text/-/png-chunk-text-1.0.3.tgz",
|
||||
"integrity": "sha512-7keEFz73uNJ9Ar1XMCNnHEXT9pICJnouMQCCYgBEmHMgdkXaQzSTmSvr6tUDSqgdEgmlRAxZd97wprgliyZoCg==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/png-chunks-encode": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/png-chunks-encode/-/png-chunks-encode-1.0.2.tgz",
|
||||
"integrity": "sha512-Dxn0aXEcSg1wVeHjvNlygm/+fKBDzWMCdxJYhjGUTeefFW/jYxWcrg+W7ppLBfH44iJMqeVBHtHBwtYQUeYvgw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/png-chunks-extract": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/@types/png-chunks-extract/-/png-chunks-extract-1.0.2.tgz",
|
||||
"integrity": "sha512-z6djfFIbrrddtunoMJBOPlyZrnmeuG1kkvHUNi2QfpOb+JMMLuLliHHTmMyRi7k7LiTAut0HbdGCF6ibDtQAHQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/pngjs": {
|
||||
"version": "6.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@types/pngjs/-/pngjs-6.0.5.tgz",
|
||||
"integrity": "sha512-0k5eKfrA83JOZPppLtS2C7OUtyNAl2wKNxfyYl9Q5g9lPkgBl/9hNyAu6HuEH2J4XmIv2znEpkDd0SaZVxW6iQ==",
|
||||
"dependencies": {
|
||||
"@types/node": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/sharp": {
|
||||
"version": "0.32.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/sharp/-/sharp-0.32.0.tgz",
|
||||
"integrity": "sha512-OOi3kL+FZDnPhVzsfD37J88FNeZh6gQsGcLc95NbeURRGvmSjeXiDcyWzF2o3yh/gQAUn2uhh/e+CPCa5nwAxw==",
|
||||
"deprecated": "This is a stub types definition. sharp provides its own type definitions, so you do not need this installed.",
|
||||
"dependencies": {
|
||||
"sharp": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/yauzl": {
|
||||
"version": "2.10.3",
|
||||
"resolved": "https://registry.npmjs.org/@types/yauzl/-/yauzl-2.10.3.tgz",
|
||||
@ -455,6 +931,14 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/crc-32": {
|
||||
"version": "0.3.0",
|
||||
"resolved": "https://registry.npmjs.org/crc-32/-/crc-32-0.3.0.tgz",
|
||||
"integrity": "sha512-kucVIjOmMc1f0tv53BJ/5WIX+MGLcKuoBhnGqQrgKJNqLByb/sVMWfW/Aw6hw0jgcqjJ2pi9E5y32zOIpaUlsA==",
|
||||
"engines": {
|
||||
"node": ">=0.8"
|
||||
}
|
||||
},
|
||||
"node_modules/create-require": {
|
||||
"version": "1.1.1",
|
||||
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
||||
@ -551,6 +1035,14 @@
|
||||
"node": ">=0.10"
|
||||
}
|
||||
},
|
||||
"node_modules/detect-libc": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.1.tgz",
|
||||
"integrity": "sha512-ecqj/sy1jcK1uWrwpR67UhYrIFQ+5WlGxth34WquCbamhFA6hkkwiu37o6J5xCHdo1oixJRfVRw+ywV+Hq/0Aw==",
|
||||
"engines": {
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/devtools-protocol": {
|
||||
"version": "0.0.1475386",
|
||||
"resolved": "https://registry.npmjs.org/devtools-protocol/-/devtools-protocol-0.0.1475386.tgz",
|
||||
@ -781,6 +1273,19 @@
|
||||
"node": ">= 6"
|
||||
}
|
||||
},
|
||||
"node_modules/fs-extra": {
|
||||
"version": "11.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz",
|
||||
"integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^6.0.1",
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.14"
|
||||
}
|
||||
},
|
||||
"node_modules/function-bind": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
|
||||
@ -878,6 +1383,11 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/graceful-fs": {
|
||||
"version": "4.2.11",
|
||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
|
||||
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
|
||||
},
|
||||
"node_modules/has-symbols": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
|
||||
@ -1056,6 +1566,17 @@
|
||||
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
|
||||
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="
|
||||
},
|
||||
"node_modules/jsonfile": {
|
||||
"version": "6.2.0",
|
||||
"resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz",
|
||||
"integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==",
|
||||
"dependencies": {
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
}
|
||||
},
|
||||
"node_modules/lines-and-columns": {
|
||||
"version": "1.2.4",
|
||||
"resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
|
||||
@ -1262,6 +1783,36 @@
|
||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="
|
||||
},
|
||||
"node_modules/png-chunk-text": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/png-chunk-text/-/png-chunk-text-1.0.0.tgz",
|
||||
"integrity": "sha512-DEROKU3SkkLGWNMzru3xPVgxyd48UGuMSZvioErCure6yhOc/pRH2ZV+SEn7nmaf7WNf3NdIpH+UTrRdKyq9Lw=="
|
||||
},
|
||||
"node_modules/png-chunks-encode": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/png-chunks-encode/-/png-chunks-encode-1.0.0.tgz",
|
||||
"integrity": "sha512-J1jcHgbQRsIIgx5wxW9UmCymV3wwn4qCCJl6KYgEU/yHCh/L2Mwq/nMOkRPtmV79TLxRZj5w3tH69pvygFkDqA==",
|
||||
"dependencies": {
|
||||
"crc-32": "^0.3.0",
|
||||
"sliced": "^1.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/png-chunks-extract": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/png-chunks-extract/-/png-chunks-extract-1.0.0.tgz",
|
||||
"integrity": "sha512-ZiVwF5EJ0DNZyzAqld8BP1qyJBaGOFaq9zl579qfbkcmOwWLLO4I9L8i2O4j3HkI6/35i0nKG2n+dZplxiT89Q==",
|
||||
"dependencies": {
|
||||
"crc-32": "^0.3.0"
|
||||
}
|
||||
},
|
||||
"node_modules/pngjs": {
|
||||
"version": "7.0.0",
|
||||
"resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz",
|
||||
"integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==",
|
||||
"engines": {
|
||||
"node": ">=14.19.0"
|
||||
}
|
||||
},
|
||||
"node_modules/progress": {
|
||||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
|
||||
@ -1386,6 +1937,52 @@
|
||||
"resolved": "https://registry.npmjs.org/seq-queue/-/seq-queue-0.0.5.tgz",
|
||||
"integrity": "sha512-hr3Wtp/GZIc/6DAGPDcV4/9WoZhjrkXsi5B/07QgX8tsdc6ilr7BFM6PM6rbdAX1kFSDYeZGLipIZZKyQP0O5Q=="
|
||||
},
|
||||
"node_modules/sharp": {
|
||||
"version": "0.34.4",
|
||||
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.34.4.tgz",
|
||||
"integrity": "sha512-FUH39xp3SBPnxWvd5iib1X8XY7J0K0X7d93sie9CJg2PO8/7gmg89Nve6OjItK53/MlAushNNxteBYfM6DEuoA==",
|
||||
"hasInstallScript": true,
|
||||
"dependencies": {
|
||||
"@img/colour": "^1.0.0",
|
||||
"detect-libc": "^2.1.0",
|
||||
"semver": "^7.7.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.17.0 || ^20.3.0 || >=21.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/libvips"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@img/sharp-darwin-arm64": "0.34.4",
|
||||
"@img/sharp-darwin-x64": "0.34.4",
|
||||
"@img/sharp-libvips-darwin-arm64": "1.2.3",
|
||||
"@img/sharp-libvips-darwin-x64": "1.2.3",
|
||||
"@img/sharp-libvips-linux-arm": "1.2.3",
|
||||
"@img/sharp-libvips-linux-arm64": "1.2.3",
|
||||
"@img/sharp-libvips-linux-ppc64": "1.2.3",
|
||||
"@img/sharp-libvips-linux-s390x": "1.2.3",
|
||||
"@img/sharp-libvips-linux-x64": "1.2.3",
|
||||
"@img/sharp-libvips-linuxmusl-arm64": "1.2.3",
|
||||
"@img/sharp-libvips-linuxmusl-x64": "1.2.3",
|
||||
"@img/sharp-linux-arm": "0.34.4",
|
||||
"@img/sharp-linux-arm64": "0.34.4",
|
||||
"@img/sharp-linux-ppc64": "0.34.4",
|
||||
"@img/sharp-linux-s390x": "0.34.4",
|
||||
"@img/sharp-linux-x64": "0.34.4",
|
||||
"@img/sharp-linuxmusl-arm64": "0.34.4",
|
||||
"@img/sharp-linuxmusl-x64": "0.34.4",
|
||||
"@img/sharp-wasm32": "0.34.4",
|
||||
"@img/sharp-win32-arm64": "0.34.4",
|
||||
"@img/sharp-win32-ia32": "0.34.4",
|
||||
"@img/sharp-win32-x64": "0.34.4"
|
||||
}
|
||||
},
|
||||
"node_modules/sliced": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz",
|
||||
"integrity": "sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA=="
|
||||
},
|
||||
"node_modules/smart-buffer": {
|
||||
"version": "4.2.0",
|
||||
"resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
|
||||
@ -1574,8 +2171,15 @@
|
||||
"node_modules/undici-types": {
|
||||
"version": "6.21.0",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
|
||||
"devOptional": true
|
||||
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz",
|
||||
"integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==",
|
||||
"engines": {
|
||||
"node": ">= 10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/uuid": {
|
||||
"version": "11.1.0",
|
||||
|
||||
package.json | 16

@@ -17,17 +17,29 @@
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "@types/node": "^20.0.0",
    "@types/node": "^20.19.19",
    "@types/png-chunk-text": "^1.0.3",
    "@types/png-chunks-encode": "^1.0.2",
    "@types/png-chunks-extract": "^1.0.2",
    "ts-node": "^10.9.2",
    "typescript": "^5.0.0"
  },
  "dependencies": {
    "@types/axios": "^0.14.4",
    "@types/fs-extra": "^11.0.4",
    "@types/pngjs": "^6.0.5",
    "@types/sharp": "^0.32.0",
    "axios": "^1.11.0",
    "dotenv": "^17.2.1",
    "fs-extra": "^11.3.2",
    "mysql2": "^3.14.3",
    "open": "^10.2.0",
    "png-chunk-text": "^1.0.0",
    "png-chunks-encode": "^1.0.0",
    "png-chunks-extract": "^1.0.0",
    "pngjs": "^7.0.0",
    "puppeteer": "^24.16.2",
    "sharp": "^0.34.4",
    "uuid": "^11.1.0"
  }
}
@ -57,7 +57,7 @@
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"seed": 920668017513581,
|
||||
"seed": 936152772258115,
|
||||
"steps": 8,
|
||||
"cfg": 1,
|
||||
"sampler_name": "euler",
|
||||
@ -76,8 +76,8 @@
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"11",
|
||||
6
|
||||
"28",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
@ -163,11 +163,11 @@
|
||||
0
|
||||
],
|
||||
"image1": [
|
||||
"23",
|
||||
"30",
|
||||
0
|
||||
],
|
||||
"image2": [
|
||||
"24",
|
||||
"27",
|
||||
0
|
||||
]
|
||||
},
|
||||
@ -178,20 +178,41 @@
|
||||
},
|
||||
"14": {
|
||||
"inputs": {
|
||||
"image": "f81662775bd0e7950e4794933ef4b3d973fbb9c2db397c8b46809797954d0074.png"
|
||||
"image": "model_outfit_location_1760043932148.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
"title": "load base image"
|
||||
}
|
||||
},
|
||||
"15": {
|
||||
"19": {
|
||||
"inputs": {
|
||||
"image": "monster_554.png"
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_dxzmg_00211_.png&type=temp&subfolder=&rand=0.09499077981761894"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_dxzmg_00212_.png&type=temp&subfolder=&rand=0.21125213225471684"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image_b": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"20": {
|
||||
@ -209,7 +230,7 @@
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "只提取图2中的怪物,怪物站在图1的女生身后,使用图1的背景,并调整怪物的光线以符合图1。\n\n\n\n\n\n\n"
|
||||
"value": "请将图2中的模特处理成手持图1中包包的照片。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
@ -231,17 +252,65 @@
|
||||
},
|
||||
"23": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"inputs": {
|
||||
"vae_name": "sdxl_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"23",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"24",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"25",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"27": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"26",
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "stretch",
|
||||
"pad_color": "0, 0, 0",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
@ -255,24 +324,49 @@
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"28": {
|
||||
"inputs": {
|
||||
"pixels": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEEncode",
|
||||
"_meta": {
|
||||
"title": "VAE Encode"
|
||||
}
|
||||
},
|
||||
"29": {
|
||||
"inputs": {
|
||||
"image": "handbag_1760043932148.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "load reference image"
|
||||
}
|
||||
},
|
||||
"30": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"25",
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"26",
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "stretch",
|
||||
"pad_color": "0, 0, 0",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"15",
|
||||
"29",
|
||||
0
|
||||
]
|
||||
},
|
||||
@ -281,20 +375,20 @@
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"31": {
|
||||
"inputs": {
|
||||
"Number": "1280"
|
||||
"Value": 720
|
||||
},
|
||||
"class_type": "Int",
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "width"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"32": {
|
||||
"inputs": {
|
||||
"Number": "720"
|
||||
"Value": 1280
|
||||
},
|
||||
"class_type": "Int",
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "height"
|
||||
}
|
||||
|
||||
src/comfyworkflows/edit_image_2_qwen_empty.json | 444 (new file)

@@ -0,0 +1,444 @@
|
||||
{
|
||||
"1": {
|
||||
"inputs": {
|
||||
"unet_name": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
||||
"weight_dtype": "default"
|
||||
},
|
||||
"class_type": "UNETLoader",
|
||||
"_meta": {
|
||||
"title": "Load Diffusion Model"
|
||||
}
|
||||
},
|
||||
"2": {
|
||||
"inputs": {
|
||||
"clip_name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
||||
"type": "qwen_image",
|
||||
"device": "default"
|
||||
},
|
||||
"class_type": "CLIPLoader",
|
||||
"_meta": {
|
||||
"title": "Load CLIP"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"inputs": {
|
||||
"vae_name": "qwen_image_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"4": {
|
||||
"inputs": {
|
||||
"lora_name": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
||||
"strength_model": 1,
|
||||
"model": [
|
||||
"1",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LoraLoaderModelOnly",
|
||||
"_meta": {
|
||||
"title": "LoraLoaderModelOnly"
|
||||
}
|
||||
},
|
||||
"5": {
|
||||
"inputs": {
|
||||
"conditioning": [
|
||||
"11",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ConditioningZeroOut",
|
||||
"_meta": {
|
||||
"title": "ConditioningZeroOut"
|
||||
}
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"seed": 38026585691397,
|
||||
"steps": 8,
|
||||
"cfg": 1,
|
||||
"sampler_name": "euler",
|
||||
"scheduler": "beta",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"11",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"5",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"36",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"7",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"font_file": "Alibaba-PuHuiTi-Heavy.ttf",
|
||||
"font_size": 40,
|
||||
"border": 32,
|
||||
"color_theme": "light",
|
||||
"reel_1": [
|
||||
"10",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReelComposit",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel Composit"
|
||||
}
|
||||
},
|
||||
"10": {
|
||||
"inputs": {
|
||||
"image1_text": "Original image",
|
||||
"image2_text": "Reference",
|
||||
"image3_text": "Result",
|
||||
"image4_text": "image4",
|
||||
"reel_height": 512,
|
||||
"border": 32,
|
||||
"image1": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image2": [
|
||||
"11",
|
||||
2
|
||||
],
|
||||
"image3": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReel",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel"
|
||||
}
|
||||
},
|
||||
"11": {
|
||||
"inputs": {
|
||||
"prompt": [
|
||||
"21",
|
||||
0
|
||||
],
|
||||
"enable_resize": false,
|
||||
"enable_vl_resize": false,
|
||||
"upscale_method": "lanczos",
|
||||
"crop": "disabled",
|
||||
"instruction": "<|im_start|>system\nDescribe the key features of the input image (color, shape, size, texture, objects, background), then explain how the user's text instruction should alter or modify the image. Generate a new image that meets the user's requirements while maintaining consistency with the original input where appropriate.<|im_end|>\n<|im_start|>user\n{}<|im_end|>\n<|im_start|>assistant\n",
|
||||
"clip": [
|
||||
"2",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
],
|
||||
"image1": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"image2": [
|
||||
"33",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "TextEncodeQwenImageEditPlus_lrzjason",
|
||||
"_meta": {
|
||||
"title": "TextEncodeQwenImageEditPlus 小志Jason(xiaozhijason)"
|
||||
}
|
||||
},
|
||||
"14": {
|
||||
"inputs": {
|
||||
"image": "model_1760082843769.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "load base image"
|
||||
}
|
||||
},
|
||||
"19": {
|
||||
"inputs": {
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00279_.png&type=temp&subfolder=&rand=0.4405150352070387"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00280_.png&type=temp&subfolder=&rand=0.9388629603648289"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image_b": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"20": {
|
||||
"inputs": {
|
||||
"filename_prefix": "combined",
|
||||
"images": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "以图像2为基础,生成一张女性肖像照片。她穿着一件黑色薄纱长袖上衣,一条光滑的皮革及膝裙,和勃艮第色的尖头靴子,手提一个深红色的手提包。场景改为极简主义风格的客厅,摆放着中性的沙发、镜面墙饰、盆栽植物和浅色地板,营造出明亮而宽敞的美感。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
"title": "String (Multiline)"
|
||||
}
|
||||
},
|
||||
"22": {
|
||||
"inputs": {
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"9",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"23": {
|
||||
"inputs": {
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"inputs": {
|
||||
"vae_name": "sdxl_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"23",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"24",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"25",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"27": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"14",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"28": {
|
||||
"inputs": {
|
||||
"pixels": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEEncode",
|
||||
"_meta": {
|
||||
"title": "VAE Encode"
|
||||
}
|
||||
},
|
||||
"29": {
|
||||
"inputs": {
|
||||
"image": "pose_1760082843769.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "load reference image"
|
||||
}
|
||||
},
|
||||
"30": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"29",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"31": {
|
||||
"inputs": {
|
||||
"Value": 720
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "width"
|
||||
}
|
||||
},
|
||||
"32": {
|
||||
"inputs": {
|
||||
"Value": 1280
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "height"
|
||||
}
|
||||
},
|
||||
"33": {
|
||||
"inputs": {
|
||||
"detect_hand": "enable",
|
||||
"detect_body": "enable",
|
||||
"detect_face": "enable",
|
||||
"resolution": 512,
|
||||
"bbox_detector": "yolox_l.onnx",
|
||||
"pose_estimator": "dw-ll_ucoco_384_bs5.torchscript.pt",
|
||||
"scale_stick_for_xinsr_cn": "disable",
|
||||
"image": [
|
||||
"30",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "DWPreprocessor",
|
||||
"_meta": {
|
||||
"title": "DWPose Estimator"
|
||||
}
|
||||
},
|
||||
"35": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"33",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"36": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
}
|
||||
}
|
||||
src/comfyworkflows/edit_image_2_qwen_handbag.json | 396 (new file)

@@ -0,0 +1,396 @@
|
||||
{
|
||||
"1": {
|
||||
"inputs": {
|
||||
"unet_name": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
||||
"weight_dtype": "default"
|
||||
},
|
||||
"class_type": "UNETLoader",
|
||||
"_meta": {
|
||||
"title": "Load Diffusion Model"
|
||||
}
|
||||
},
|
||||
"2": {
|
||||
"inputs": {
|
||||
"clip_name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
||||
"type": "qwen_image",
|
||||
"device": "default"
|
||||
},
|
||||
"class_type": "CLIPLoader",
|
||||
"_meta": {
|
||||
"title": "Load CLIP"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"inputs": {
|
||||
"vae_name": "qwen_image_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"4": {
|
||||
"inputs": {
|
||||
"lora_name": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
||||
"strength_model": 1,
|
||||
"model": [
|
||||
"1",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LoraLoaderModelOnly",
|
||||
"_meta": {
|
||||
"title": "LoraLoaderModelOnly"
|
||||
}
|
||||
},
|
||||
"5": {
|
||||
"inputs": {
|
||||
"conditioning": [
|
||||
"11",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ConditioningZeroOut",
|
||||
"_meta": {
|
||||
"title": "ConditioningZeroOut"
|
||||
}
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"seed": 323591075024702,
|
||||
"steps": 8,
|
||||
"cfg": 1,
|
||||
"sampler_name": "euler",
|
||||
"scheduler": "beta",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"11",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"5",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"28",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"7",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"font_file": "Alibaba-PuHuiTi-Heavy.ttf",
|
||||
"font_size": 40,
|
||||
"border": 32,
|
||||
"color_theme": "light",
|
||||
"reel_1": [
|
||||
"10",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReelComposit",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel Composit"
|
||||
}
|
||||
},
|
||||
"10": {
|
||||
"inputs": {
|
||||
"image1_text": "Original image",
|
||||
"image2_text": "Reference",
|
||||
"image3_text": "Result",
|
||||
"image4_text": "image4",
|
||||
"reel_height": 512,
|
||||
"border": 32,
|
||||
"image1": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image2": [
|
||||
"11",
|
||||
2
|
||||
],
|
||||
"image3": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReel",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel"
|
||||
}
|
||||
},
|
||||
"11": {
|
||||
"inputs": {
|
||||
"prompt": [
|
||||
"21",
|
||||
0
|
||||
],
|
||||
"enable_resize": true,
|
||||
"enable_vl_resize": true,
|
||||
"upscale_method": "lanczos",
|
||||
"crop": "disabled",
|
||||
"instruction": "<|im_start|>system\nDescribe the key features of the input image (color, shape, size, texture, objects, background), then explain how the user's text instruction should alter or modify the image. Generate a new image that meets the user's requirements while maintaining consistency with the original input where appropriate.<|im_end|>\n<|im_start|>user\n{}<|im_end|>\n<|im_start|>assistant\n",
|
||||
"clip": [
|
||||
"2",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
],
|
||||
"image1": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"image2": [
|
||||
"30",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "TextEncodeQwenImageEditPlus_lrzjason",
|
||||
"_meta": {
|
||||
"title": "TextEncodeQwenImageEditPlus 小志Jason(xiaozhijason)"
|
||||
}
|
||||
},
|
||||
"14": {
|
||||
"inputs": {
|
||||
"image": "model_outfit_location_handbag1_1760085003312.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"19": {
|
||||
"inputs": {
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00305_.png&type=temp&subfolder=&rand=0.5408789951924671"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00306_.png&type=temp&subfolder=&rand=0.2425856190711294"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image_b": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"20": {
|
||||
"inputs": {
|
||||
"filename_prefix": "combined",
|
||||
"images": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "请将图2中的女性修改成把图1的包背在肩上。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
"title": "String (Multiline)"
|
||||
}
|
||||
},
|
||||
"22": {
|
||||
"inputs": {
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"9",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"23": {
|
||||
"inputs": {
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"inputs": {
|
||||
"vae_name": "sdxl_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"23",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"24",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"25",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"27": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "crop",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"14",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"28": {
|
||||
"inputs": {
|
||||
"pixels": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEEncode",
|
||||
"_meta": {
|
||||
"title": "VAE Encode"
|
||||
}
|
||||
},
|
||||
"29": {
|
||||
"inputs": {
|
||||
"image": "handbag_1760085003312.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"30": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "crop",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"29",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"31": {
|
||||
"inputs": {
|
||||
"Value": 720
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "width"
|
||||
}
|
||||
},
|
||||
"32": {
|
||||
"inputs": {
|
||||
"Value": 1280
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "height"
|
||||
}
|
||||
}
|
||||
}
|
||||
src/comfyworkflows/edit_image_2_qwen_pose.json | 444 (new file)

@@ -0,0 +1,444 @@
|
||||
{
|
||||
"1": {
|
||||
"inputs": {
|
||||
"unet_name": "qwen_image_edit_2509_fp8_e4m3fn.safetensors",
|
||||
"weight_dtype": "default"
|
||||
},
|
||||
"class_type": "UNETLoader",
|
||||
"_meta": {
|
||||
"title": "Load Diffusion Model"
|
||||
}
|
||||
},
|
||||
"2": {
|
||||
"inputs": {
|
||||
"clip_name": "qwen_2.5_vl_7b_fp8_scaled.safetensors",
|
||||
"type": "qwen_image",
|
||||
"device": "default"
|
||||
},
|
||||
"class_type": "CLIPLoader",
|
||||
"_meta": {
|
||||
"title": "Load CLIP"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"inputs": {
|
||||
"vae_name": "qwen_image_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"4": {
|
||||
"inputs": {
|
||||
"lora_name": "Qwen-Image-Lightning-8steps-V2.0.safetensors",
|
||||
"strength_model": 1,
|
||||
"model": [
|
||||
"1",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LoraLoaderModelOnly",
|
||||
"_meta": {
|
||||
"title": "LoraLoaderModelOnly"
|
||||
}
|
||||
},
|
||||
"5": {
|
||||
"inputs": {
|
||||
"conditioning": [
|
||||
"11",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ConditioningZeroOut",
|
||||
"_meta": {
|
||||
"title": "ConditioningZeroOut"
|
||||
}
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"seed": 38026585691397,
|
||||
"steps": 8,
|
||||
"cfg": 1,
|
||||
"sampler_name": "euler",
|
||||
"scheduler": "beta",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"11",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"5",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"36",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"7",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"font_file": "Alibaba-PuHuiTi-Heavy.ttf",
|
||||
"font_size": 40,
|
||||
"border": 32,
|
||||
"color_theme": "light",
|
||||
"reel_1": [
|
||||
"10",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReelComposit",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel Composit"
|
||||
}
|
||||
},
|
||||
"10": {
|
||||
"inputs": {
|
||||
"image1_text": "Original image",
|
||||
"image2_text": "Reference",
|
||||
"image3_text": "Result",
|
||||
"image4_text": "image4",
|
||||
"reel_height": 512,
|
||||
"border": 32,
|
||||
"image1": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image2": [
|
||||
"11",
|
||||
2
|
||||
],
|
||||
"image3": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReel",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel"
|
||||
}
|
||||
},
|
||||
"11": {
|
||||
"inputs": {
|
||||
"prompt": [
|
||||
"21",
|
||||
0
|
||||
],
|
||||
"enable_resize": false,
|
||||
"enable_vl_resize": false,
|
||||
"upscale_method": "lanczos",
|
||||
"crop": "disabled",
|
||||
"instruction": "<|im_start|>system\nDescribe the key features of the input image (color, shape, size, texture, objects, background), then explain how the user's text instruction should alter or modify the image. Generate a new image that meets the user's requirements while maintaining consistency with the original input where appropriate.<|im_end|>\n<|im_start|>user\n{}<|im_end|>\n<|im_start|>assistant\n",
|
||||
"clip": [
|
||||
"2",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
],
|
||||
"image1": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"image2": [
|
||||
"33",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "TextEncodeQwenImageEditPlus_lrzjason",
|
||||
"_meta": {
|
||||
"title": "TextEncodeQwenImageEditPlus 小志Jason(xiaozhijason)"
|
||||
}
|
||||
},
|
||||
"14": {
|
||||
"inputs": {
|
||||
"image": "model_1760082843769.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "load base image"
|
||||
}
|
||||
},
|
||||
"19": {
|
||||
"inputs": {
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00279_.png&type=temp&subfolder=&rand=0.4405150352070387"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_uoazy_00280_.png&type=temp&subfolder=&rand=0.9388629603648289"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image_b": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"20": {
|
||||
"inputs": {
|
||||
"filename_prefix": "combined",
|
||||
"images": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "以图像2为基础,生成一张女性肖像照片。她穿着一件黑色薄纱长袖上衣,一条光滑的皮革及膝裙,和勃艮第色的尖头靴子,手提一个深红色的手提包。场景改为极简主义风格的客厅,摆放着中性的沙发、镜面墙饰、盆栽植物和浅色地板,营造出明亮而宽敞的美感。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
"title": "String (Multiline)"
|
||||
}
|
||||
},
|
||||
"22": {
|
||||
"inputs": {
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"9",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"23": {
|
||||
"inputs": {
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"inputs": {
|
||||
"vae_name": "sdxl_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"23",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"24",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"25",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"27": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"14",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"28": {
|
||||
"inputs": {
|
||||
"pixels": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEEncode",
|
||||
"_meta": {
|
||||
"title": "VAE Encode"
|
||||
}
|
||||
},
|
||||
"29": {
|
||||
"inputs": {
|
||||
"image": "pose_1760082843769.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "load reference image"
|
||||
}
|
||||
},
|
||||
"30": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "resize",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"29",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"31": {
|
||||
"inputs": {
|
||||
"Value": 720
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "width"
|
||||
}
|
||||
},
|
||||
"32": {
|
||||
"inputs": {
|
||||
"Value": 1280
|
||||
},
|
||||
"class_type": "DF_Integer",
|
||||
"_meta": {
|
||||
"title": "height"
|
||||
}
|
||||
},
|
||||
"33": {
|
||||
"inputs": {
|
||||
"detect_hand": "enable",
|
||||
"detect_body": "enable",
|
||||
"detect_face": "enable",
|
||||
"resolution": 512,
|
||||
"bbox_detector": "yolox_l.onnx",
|
||||
"pose_estimator": "dw-ll_ucoco_384_bs5.torchscript.pt",
|
||||
"scale_stick_for_xinsr_cn": "disable",
|
||||
"image": [
|
||||
"30",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "DWPreprocessor",
|
||||
"_meta": {
|
||||
"title": "DWPose Estimator"
|
||||
}
|
||||
},
|
||||
"35": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"33",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"36": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"31",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"32",
|
||||
0
|
||||
],
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -57,7 +57,7 @@
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"seed": 838097333311955,
|
||||
"seed": 639545413023960,
|
||||
"steps": 8,
|
||||
"cfg": 1,
|
||||
"sampler_name": "euler",
|
||||
@@ -76,8 +76,8 @@
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"11",
|
||||
6
|
||||
"28",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
@@ -101,6 +101,48 @@
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"font_file": "Alibaba-PuHuiTi-Heavy.ttf",
|
||||
"font_size": 40,
|
||||
"border": 32,
|
||||
"color_theme": "light",
|
||||
"reel_1": [
|
||||
"10",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReelComposit",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel Composit"
|
||||
}
|
||||
},
|
||||
"10": {
|
||||
"inputs": {
|
||||
"image1_text": "Original image",
|
||||
"image2_text": "Reference",
|
||||
"image3_text": "Result",
|
||||
"image4_text": "image4",
|
||||
"reel_height": 512,
|
||||
"border": 32,
|
||||
"image1": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image2": [
|
||||
"11",
|
||||
2
|
||||
],
|
||||
"image3": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "LayerUtility: ImageReel",
|
||||
"_meta": {
|
||||
"title": "LayerUtility: Image Reel"
|
||||
}
|
||||
},
|
||||
"11": {
|
||||
"inputs": {
|
||||
"prompt": [
|
||||
@@ -121,7 +163,7 @@
|
||||
0
|
||||
],
|
||||
"image1": [
|
||||
"24",
|
||||
"27",
|
||||
0
|
||||
]
|
||||
},
|
||||
@@ -130,6 +172,45 @@
|
||||
"title": "TextEncodeQwenImageEditPlus 小志Jason(xiaozhijason)"
|
||||
}
|
||||
},
|
||||
"14": {
|
||||
"inputs": {
|
||||
"image": "7318418139276581_1759654853736_18 - コピー.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"19": {
|
||||
"inputs": {
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_niitk_00003_.png&type=temp&subfolder=&rand=0.9166876008508786"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_niitk_00004_.png&type=temp&subfolder=&rand=0.06689875639286158"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"11",
|
||||
1
|
||||
],
|
||||
"image_b": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"20": {
|
||||
"inputs": {
|
||||
"filename_prefix": "qwenedit",
|
||||
@@ -145,43 +226,66 @@
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "change camera angle to closeup face from image1, change background to light gray with faing gradient, change face angle to look at directry look at camera"
|
||||
"value": "请从图1中提取主要主体,把背景设置为浅灰色,并让主体正面朝向,制作成产品照片。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
"title": "String (Multiline)"
|
||||
}
|
||||
},
|
||||
"24": {
|
||||
"22": {
|
||||
"inputs": {
|
||||
"measurement": "pixels",
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"fit": "contain",
|
||||
"method": "nearest-exact",
|
||||
"image": [
|
||||
"64",
|
||||
"filename_prefix": "ComfyUI",
|
||||
"images": [
|
||||
"9",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Resize (rgthree)",
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Image Resize (rgthree)"
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"64": {
|
||||
"23": {
|
||||
"inputs": {
|
||||
"image": "1337074888177434_1758776251440_2.png"
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"65": {
|
||||
"24": {
|
||||
"inputs": {
|
||||
"vae_name": "sdxl_vae.safetensors"
|
||||
},
|
||||
"class_type": "VAELoader",
|
||||
"_meta": {
|
||||
"title": "Load VAE"
|
||||
}
|
||||
},
|
||||
"25": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"23",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"24",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"26": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"24",
|
||||
"25",
|
||||
0
|
||||
]
|
||||
},
|
||||
@@ -189,5 +293,41 @@
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"27": {
|
||||
"inputs": {
|
||||
"width": 720,
|
||||
"height": 1280,
|
||||
"upscale_method": "nearest-exact",
|
||||
"keep_proportion": "pad",
|
||||
"pad_color": "192,192,192",
|
||||
"crop_position": "center",
|
||||
"divisible_by": 2,
|
||||
"device": "cpu",
|
||||
"image": [
|
||||
"14",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageResizeKJv2",
|
||||
"_meta": {
|
||||
"title": "Resize Image v2"
|
||||
}
|
||||
},
|
||||
"28": {
|
||||
"inputs": {
|
||||
"pixels": [
|
||||
"27",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "VAEEncode",
|
||||
"_meta": {
|
||||
"title": "VAE Encode"
|
||||
}
|
||||
}
|
||||
}
|
||||
111
src/comfyworkflows/facerestore_upscale.json
Normal file
@@ -0,0 +1,111 @@
|
||||
{
|
||||
"1": {
|
||||
"inputs": {
|
||||
"image": "model_outfit_location_handbag3_1760086053609.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"2": {
|
||||
"inputs": {
|
||||
"enabled": true,
|
||||
"swap_model": "inswapper_128.onnx",
|
||||
"facedetection": "YOLOv5l",
|
||||
"face_restore_model": "GPEN-BFR-1024.onnx",
|
||||
"face_restore_visibility": 0.5200000000000001,
|
||||
"codeformer_weight": 0.5,
|
||||
"detect_gender_input": "no",
|
||||
"detect_gender_source": "no",
|
||||
"input_faces_index": "0",
|
||||
"source_faces_index": "0",
|
||||
"console_log_level": 1,
|
||||
"input_image": [
|
||||
"6",
|
||||
0
|
||||
],
|
||||
"source_image": [
|
||||
"3",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ReActorFaceSwap",
|
||||
"_meta": {
|
||||
"title": "ReActor 🌌 Fast Face Swap"
|
||||
}
|
||||
},
|
||||
"3": {
|
||||
"inputs": {
|
||||
"image": "outfit_1760086053609.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"4": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"2",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"6": {
|
||||
"inputs": {
|
||||
"resize_to": "4k",
|
||||
"images": [
|
||||
"1",
|
||||
0
|
||||
],
|
||||
"upscaler_trt_model": [
|
||||
"8",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "UpscalerTensorrt",
|
||||
"_meta": {
|
||||
"title": "Upscaler Tensorrt ⚡"
|
||||
}
|
||||
},
|
||||
"7": {
|
||||
"inputs": {
|
||||
"images": [
|
||||
"6",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "PreviewImage",
|
||||
"_meta": {
|
||||
"title": "Preview Image"
|
||||
}
|
||||
},
|
||||
"8": {
|
||||
"inputs": {
|
||||
"model": "4x-UltraSharp",
|
||||
"precision": "fp16"
|
||||
},
|
||||
"class_type": "LoadUpscalerTensorrtModel",
|
||||
"_meta": {
|
||||
"title": "Load Upscale Tensorrt Model"
|
||||
}
|
||||
},
|
||||
"9": {
|
||||
"inputs": {
|
||||
"filename_prefix": "upscaled",
|
||||
"images": [
|
||||
"2",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
}
|
||||
}
|
||||
388
src/comfyworkflows/upscale.json
Normal file
@@ -0,0 +1,388 @@
|
||||
{
|
||||
"4": {
|
||||
"inputs": {
|
||||
"ckpt_name": "dreamshaperXL_v21TurboDPMSDE.safetensors"
|
||||
},
|
||||
"class_type": "CheckpointLoaderSimple",
|
||||
"_meta": {
|
||||
"title": "Load Checkpoint"
|
||||
}
|
||||
},
|
||||
"12": {
|
||||
"inputs": {
|
||||
"seed": 302411063911982,
|
||||
"steps": 8,
|
||||
"cfg": 2,
|
||||
"sampler_name": "dpmpp_sde",
|
||||
"scheduler": "karras",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"65",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"69",
|
||||
0
|
||||
],
|
||||
"latent_image": [
|
||||
"13",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"13": {
|
||||
"inputs": {
|
||||
"width": 1216,
|
||||
"height": 832,
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"16": {
|
||||
"inputs": {
|
||||
"samples": [
|
||||
"12",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"4",
|
||||
2
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecode",
|
||||
"_meta": {
|
||||
"title": "VAE Decode"
|
||||
}
|
||||
},
|
||||
"36": {
|
||||
"inputs": {
|
||||
"method": "Mixture of Diffusers",
|
||||
"tile_width": 1024,
|
||||
"tile_height": 1024,
|
||||
"tile_overlap": 32,
|
||||
"tile_batch_size": 8,
|
||||
"model": [
|
||||
"4",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "TiledDiffusion",
|
||||
"_meta": {
|
||||
"title": "Tiled Diffusion"
|
||||
}
|
||||
},
|
||||
"51": {
|
||||
"inputs": {
|
||||
"tile_size": 1024,
|
||||
"fast": false,
|
||||
"samples": [
|
||||
"80",
|
||||
0
|
||||
],
|
||||
"vae": [
|
||||
"4",
|
||||
2
|
||||
]
|
||||
},
|
||||
"class_type": "VAEDecodeTiled_TiledDiffusion",
|
||||
"_meta": {
|
||||
"title": "Tiled VAE Decode"
|
||||
}
|
||||
},
|
||||
"65": {
|
||||
"inputs": {
|
||||
"text": "photo of a high end sports car",
|
||||
"clip": [
|
||||
"4",
|
||||
1
|
||||
]
|
||||
},
|
||||
"class_type": "CLIPTextEncode",
|
||||
"_meta": {
|
||||
"title": "CLIP Text Encode (Prompt)"
|
||||
}
|
||||
},
|
||||
"69": {
|
||||
"inputs": {
|
||||
"text": "text, watermark, (film grain, noise:1.2)",
|
||||
"clip": [
|
||||
"4",
|
||||
1
|
||||
]
|
||||
},
|
||||
"class_type": "CLIPTextEncode",
|
||||
"_meta": {
|
||||
"title": "CLIP Text Encode (Prompt)"
|
||||
}
|
||||
},
|
||||
"80": {
|
||||
"inputs": {
|
||||
"seed": 105566927616764,
|
||||
"steps": 4,
|
||||
"cfg": 2,
|
||||
"sampler_name": "dpmpp_sde",
|
||||
"scheduler": "karras",
|
||||
"denoise": 1,
|
||||
"model": [
|
||||
"36",
|
||||
0
|
||||
],
|
||||
"positive": [
|
||||
"141",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"141",
|
||||
1
|
||||
],
|
||||
"latent_image": [
|
||||
"84",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "KSampler",
|
||||
"_meta": {
|
||||
"title": "KSampler"
|
||||
}
|
||||
},
|
||||
"84": {
|
||||
"inputs": {
|
||||
"width": [
|
||||
"106",
|
||||
0
|
||||
],
|
||||
"height": [
|
||||
"107",
|
||||
0
|
||||
],
|
||||
"batch_size": 1
|
||||
},
|
||||
"class_type": "EmptyLatentImage",
|
||||
"_meta": {
|
||||
"title": "Empty Latent Image"
|
||||
}
|
||||
},
|
||||
"105": {
|
||||
"inputs": {
|
||||
"image": [
|
||||
"115",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "GetImageSizeAndCount",
|
||||
"_meta": {
|
||||
"title": "Get Image Size & Count"
|
||||
}
|
||||
},
|
||||
"106": {
|
||||
"inputs": {
|
||||
"value": "a*b",
|
||||
"a": [
|
||||
"105",
|
||||
1
|
||||
],
|
||||
"b": [
|
||||
"117",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SimpleMath+",
|
||||
"_meta": {
|
||||
"title": "🔧 Simple Math"
|
||||
}
|
||||
},
|
||||
"107": {
|
||||
"inputs": {
|
||||
"value": "a*b",
|
||||
"a": [
|
||||
"105",
|
||||
2
|
||||
],
|
||||
"b": [
|
||||
"117",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SimpleMath+",
|
||||
"_meta": {
|
||||
"title": "🔧 Simple Math"
|
||||
}
|
||||
},
|
||||
"111": {
|
||||
"inputs": {
|
||||
"image": "model_outfit_location_handbag1_1760092227085.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"115": {
|
||||
"inputs": {
|
||||
"any_01": [
|
||||
"111",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Any Switch (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Any Switch (rgthree)"
|
||||
}
|
||||
},
|
||||
"117": {
|
||||
"inputs": {
|
||||
"value": 4.000000000000001
|
||||
},
|
||||
"class_type": "FloatConstant",
|
||||
"_meta": {
|
||||
"title": "Float Constant"
|
||||
}
|
||||
},
|
||||
"133": {
|
||||
"inputs": {
|
||||
"rgthree_comparer": {
|
||||
"images": [
|
||||
{
|
||||
"name": "A",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_ybqmm_00009_.png&type=temp&subfolder=&rand=0.02707950499627365"
|
||||
},
|
||||
{
|
||||
"name": "B",
|
||||
"selected": true,
|
||||
"url": "/api/view?filename=rgthree.compare._temp_ybqmm_00010_.png&type=temp&subfolder=&rand=0.18690183070180255"
|
||||
}
|
||||
]
|
||||
},
|
||||
"image_a": [
|
||||
"115",
|
||||
0
|
||||
],
|
||||
"image_b": [
|
||||
"149",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "Image Comparer (rgthree)",
|
||||
"_meta": {
|
||||
"title": "Image Comparer (rgthree)"
|
||||
}
|
||||
},
|
||||
"141": {
|
||||
"inputs": {
|
||||
"strength": 0.65,
|
||||
"start_percent": 0,
|
||||
"end_percent": 0.9,
|
||||
"positive": [
|
||||
"65",
|
||||
0
|
||||
],
|
||||
"negative": [
|
||||
"69",
|
||||
0
|
||||
],
|
||||
"control_net": [
|
||||
"142",
|
||||
0
|
||||
],
|
||||
"image": [
|
||||
"115",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ACN_AdvancedControlNetApply",
|
||||
"_meta": {
|
||||
"title": "Apply Advanced ControlNet 🛂🅐🅒🅝"
|
||||
}
|
||||
},
|
||||
"142": {
|
||||
"inputs": {
|
||||
"control_net_name": "xinsircontrolnet-tile-sdxl-1.0.safetensors"
|
||||
},
|
||||
"class_type": "ControlNetLoaderAdvanced",
|
||||
"_meta": {
|
||||
"title": "Load Advanced ControlNet Model 🛂🅐🅒🅝"
|
||||
}
|
||||
},
|
||||
"148": {
|
||||
"inputs": {
|
||||
"color_space": "LAB",
|
||||
"factor": 0.8,
|
||||
"device": "auto",
|
||||
"batch_size": 0,
|
||||
"image": [
|
||||
"51",
|
||||
0
|
||||
],
|
||||
"reference": [
|
||||
"115",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageColorMatch+",
|
||||
"_meta": {
|
||||
"title": "🔧 Image Color Match"
|
||||
}
|
||||
},
|
||||
"149": {
|
||||
"inputs": {
|
||||
"sharpen_radius": 1,
|
||||
"sigma": 1,
|
||||
"alpha": 0.05,
|
||||
"image": [
|
||||
"148",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "ImageSharpen",
|
||||
"_meta": {
|
||||
"title": "Image Sharpen"
|
||||
}
|
||||
},
|
||||
"154": {
|
||||
"inputs": {
|
||||
"filename_prefix": "Upscaled",
|
||||
"images": [
|
||||
"149",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
},
|
||||
"165": {
|
||||
"inputs": {
|
||||
"image": "model_outfit_location_handbag1_1760092227085.png"
|
||||
},
|
||||
"class_type": "LoadImage",
|
||||
"_meta": {
|
||||
"title": "Load Image"
|
||||
}
|
||||
},
|
||||
"166": {
|
||||
"inputs": {
|
||||
"filename_prefix": "upscaled",
|
||||
"images": [
|
||||
"149",
|
||||
0
|
||||
]
|
||||
},
|
||||
"class_type": "SaveImage",
|
||||
"_meta": {
|
||||
"title": "Save Image"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -158,7 +158,7 @@
|
||||
},
|
||||
"21": {
|
||||
"inputs": {
|
||||
"value": "图2中的女孩穿着图1的衣服\n\n\n\n\n\n"
|
||||
"value": "图2中的人物穿着图1的上衣、下装和配饰。"
|
||||
},
|
||||
"class_type": "PrimitiveStringMultiline",
|
||||
"_meta": {
|
||||
|
||||
@@ -15,17 +15,27 @@ async function convertImage(
|
||||
baseFileName: string,
|
||||
comfyBaseUrl: string,
|
||||
comfyOutputDir: string,
|
||||
size: ImageSize = { width: 720, height: 1280 }
|
||||
size: ImageSize = { width: 720, height: 1280 },
|
||||
useEmptyLatent: boolean = false
|
||||
): Promise<string> {
|
||||
const COMFY_BASE_URL = comfyBaseUrl.replace(/\/$/, '');
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_qwen.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['24']['inputs']['width'] = size.width;
|
||||
workflow['24']['inputs']['height'] = size.height;
|
||||
workflow['64']['inputs']['image'] = baseFileName;
|
||||
if (useEmptyLatent) {
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_2_qwen_empty.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['30']['inputs']['width'] = size.width;
|
||||
workflow['31']['inputs']['height'] = size.height;
|
||||
workflow['14']['inputs']['image'] = baseFileName;
|
||||
} else {
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_qwen.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['23']['inputs']['width'] = size.width;
|
||||
workflow['23']['inputs']['height'] = size.height;
|
||||
workflow['14']['inputs']['image'] = baseFileName;
|
||||
}
|
||||
|
||||
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
@@ -66,7 +76,8 @@ async function convertImage(
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
|
||||
// baseFileName is connected to image2
|
||||
// secondFileName is connected to image1
|
||||
async function convertImageWithFile(
|
||||
prompt: string,
|
||||
baseFileName: string,
|
||||
@@ -81,10 +92,131 @@ async function convertImageWithFile(
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_2_qwen.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['25']['inputs']['width'] = size.width;
|
||||
workflow['26']['inputs']['height'] = size.height;
|
||||
workflow['31']['inputs']['Value'] = size.width;
|
||||
workflow['32']['inputs']['Value'] = size.height;
|
||||
workflow['14']['inputs']['image'] = baseFileName;
|
||||
workflow['15']['inputs']['image'] = secondFileName;
|
||||
workflow['29']['inputs']['image'] = secondFileName;
|
||||
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
|
||||
let history;
|
||||
do {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
const historyResponse = await axios.get(`${COMFY_BASE_URL}/history/${promptId}`);
|
||||
history = historyResponse.data[promptId];
|
||||
} while (!history || Object.keys(history.outputs).length === 0);
|
||||
|
||||
const files = await fs.readdir(COMFY_OUTPUT_DIR!);
|
||||
const generatedFiles = files.filter(file => file.startsWith('combined'));
|
||||
|
||||
const fileStats = await Promise.all(
|
||||
generatedFiles.map(async (file) => {
|
||||
const stat = await fs.stat(path.join(COMFY_OUTPUT_DIR!, file));
|
||||
return { file, mtime: stat.mtime };
|
||||
})
|
||||
);
|
||||
|
||||
fileStats.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||
|
||||
const latestFile = fileStats[0].file;
|
||||
const newFilePath = path.resolve('./generated', baseFileName);
|
||||
|
||||
await fs.mkdir('./generated', { recursive: true });
|
||||
|
||||
const sourcePath = path.join(COMFY_OUTPUT_DIR!, latestFile);
|
||||
try {
|
||||
await fs.unlink(newFilePath);
|
||||
} catch (err) {
|
||||
// ignore if not exists
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, newFilePath);
|
||||
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
|
||||
// baseFileName is connected to image1
|
||||
// secondFileName is connected to image2
|
||||
async function convertImageWithFileHandbag(
|
||||
prompt: string,
|
||||
baseFileName: string,
|
||||
secondFileName: string,
|
||||
comfyBaseUrl: string,
|
||||
comfyOutputDir: string,
|
||||
size: ImageSize = { width: 720, height: 1280 }
|
||||
): Promise<string> {
|
||||
const COMFY_BASE_URL = comfyBaseUrl.replace(/\/$/, '');
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_2_qwen_handbag.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['31']['inputs']['Value'] = size.width;
|
||||
workflow['32']['inputs']['Value'] = size.height;
|
||||
workflow['14']['inputs']['image'] = baseFileName;
|
||||
workflow['29']['inputs']['image'] = secondFileName;
|
||||
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
|
||||
let history;
|
||||
do {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
const historyResponse = await axios.get(`${COMFY_BASE_URL}/history/${promptId}`);
|
||||
history = historyResponse.data[promptId];
|
||||
} while (!history || Object.keys(history.outputs).length === 0);
|
||||
|
||||
const files = await fs.readdir(COMFY_OUTPUT_DIR!);
|
||||
const generatedFiles = files.filter(file => file.startsWith('combined'));
|
||||
|
||||
const fileStats = await Promise.all(
|
||||
generatedFiles.map(async (file) => {
|
||||
const stat = await fs.stat(path.join(COMFY_OUTPUT_DIR!, file));
|
||||
return { file, mtime: stat.mtime };
|
||||
})
|
||||
);
|
||||
|
||||
fileStats.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||
|
||||
const latestFile = fileStats[0].file;
|
||||
const newFilePath = path.resolve('./generated', baseFileName);
|
||||
|
||||
await fs.mkdir('./generated', { recursive: true });
|
||||
|
||||
const sourcePath = path.join(COMFY_OUTPUT_DIR!, latestFile);
|
||||
try {
|
||||
await fs.unlink(newFilePath);
|
||||
} catch (err) {
|
||||
// ignore if not exists
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, newFilePath);
|
||||
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
// baseFileName is connected to image1
|
||||
// secondFileName is connected to image2
|
||||
async function convertImageWithFileForPose(
|
||||
prompt: string,
|
||||
baseFileName: string,
|
||||
secondFileName: string,
|
||||
comfyBaseUrl: string,
|
||||
comfyOutputDir: string,
|
||||
size: ImageSize = { width: 720, height: 1280 }
|
||||
): Promise<string> {
|
||||
const COMFY_BASE_URL = comfyBaseUrl.replace(/\/$/, '');
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/edit_image_2_qwen_pose.json', 'utf-8'));
|
||||
workflow['21']['inputs']['value'] = prompt;
|
||||
workflow['31']['inputs']['Value'] = size.width;
|
||||
workflow['32']['inputs']['Value'] = size.height;
|
||||
workflow['14']['inputs']['image'] = baseFileName;
|
||||
workflow['29']['inputs']['image'] = secondFileName;
|
||||
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
@@ -262,7 +394,7 @@ export async function convertImageVton(
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/vton.json', 'utf-8'));
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/vton_cloth.json', 'utf-8'));
|
||||
workflow['76']['inputs']['number'] = size.width;
|
||||
workflow['77']['inputs']['number'] = size.height;
|
||||
|
||||
@@ -369,4 +501,4 @@ export async function convertImageVtonPose(
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
export { convertImage, convertImageWithFile };
|
||||
export { convertImage, convertImageWithFile, convertImageWithFileForPose, convertImageWithFileHandbag };
|
||||
|
||||
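A minimal usage sketch for the helpers exported above (an editor's illustration, not part of this diff; the prompt text and the import path for a script under src/ are assumptions, while the file names and env variable names come from elsewhere in this change):

import dotenv from 'dotenv';
import { convertImageWithFileForPose } from './lib/image-converter';

dotenv.config();

async function demo() {
  // baseFileName feeds workflow node 14 (the base image / image1), secondFileName feeds node 29 (the pose reference).
  const outPath = await convertImageWithFileForPose(
    'match the pose of image2',                    // hypothetical prompt, written into workflow node 21
    'model_1760082843769.png',                     // base image, assumed to already sit in the ComfyUI input dir
    'pose_1760082843769.png',                      // pose reference image
    process.env.SERVER1_COMFY_BASE_URL!,
    process.env.SERVER1_COMFY_OUTPUT_DIR!,
    { width: 720, height: 1280 }
  );
  console.log('result copied to', outPath);
}

demo().catch(console.error);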
118
src/lib/image-upscaler.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import axios from 'axios';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
interface ImageSize {
|
||||
width: number;
|
||||
height: number;
|
||||
}
|
||||
|
||||
async function facerestore_upscale(
|
||||
baseFileName: string,
|
||||
faceReferenceName: string,
|
||||
comfyBaseUrl: string,
|
||||
comfyOutputDir: string,
|
||||
): Promise<string> {
|
||||
const COMFY_BASE_URL = comfyBaseUrl.replace(/\/$/, '');
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/facerestore_upscale.json', 'utf-8'));
|
||||
workflow['1']['inputs']['image'] = baseFileName;
|
||||
workflow['3']['inputs']['image'] = faceReferenceName;
|
||||
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
|
||||
let history;
|
||||
do {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
const historyResponse = await axios.get(`${COMFY_BASE_URL}/history/${promptId}`);
|
||||
history = historyResponse.data[promptId];
|
||||
} while (!history || Object.keys(history.outputs).length === 0);
|
||||
|
||||
const files = await fs.readdir(COMFY_OUTPUT_DIR!);
|
||||
const generatedFiles = files.filter(file => file.startsWith('upscaled'));
|
||||
|
||||
const fileStats = await Promise.all(
|
||||
generatedFiles.map(async (file) => {
|
||||
const stat = await fs.stat(path.join(COMFY_OUTPUT_DIR!, file));
|
||||
return { file, mtime: stat.mtime };
|
||||
})
|
||||
);
|
||||
|
||||
fileStats.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||
|
||||
const latestFile = fileStats[0].file;
|
||||
const newFilePath = path.resolve('./generated', baseFileName);
|
||||
|
||||
await fs.mkdir('./generated', { recursive: true });
|
||||
|
||||
const sourcePath = path.join(COMFY_OUTPUT_DIR!, latestFile);
|
||||
try {
|
||||
await fs.unlink(newFilePath);
|
||||
} catch (err) {
|
||||
// ignore if not exists
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, newFilePath);
|
||||
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
|
||||
async function upscale(
|
||||
baseFileName: string,
|
||||
comfyBaseUrl: string,
|
||||
comfyOutputDir: string,
|
||||
): Promise<string> {
|
||||
const COMFY_BASE_URL = comfyBaseUrl.replace(/\/$/, '');
|
||||
const COMFY_OUTPUT_DIR = comfyOutputDir;
|
||||
let workflow;
|
||||
|
||||
workflow = JSON.parse(await fs.readFile('src/comfyworkflows/upscale.json', 'utf-8'));
|
||||
workflow['111']['inputs']['image'] = baseFileName;
|
||||
const response = await axios.post(`${COMFY_BASE_URL}/prompt`, { prompt: workflow });
|
||||
const promptId = response.data.prompt_id;
|
||||
|
||||
let history;
|
||||
do {
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
const historyResponse = await axios.get(`${COMFY_BASE_URL}/history/${promptId}`);
|
||||
history = historyResponse.data[promptId];
|
||||
} while (!history || Object.keys(history.outputs).length === 0);
|
||||
|
||||
const files = await fs.readdir(COMFY_OUTPUT_DIR!);
|
||||
const generatedFiles = files.filter(file => file.startsWith('upscaled'));
|
||||
|
||||
const fileStats = await Promise.all(
|
||||
generatedFiles.map(async (file) => {
|
||||
const stat = await fs.stat(path.join(COMFY_OUTPUT_DIR!, file));
|
||||
return { file, mtime: stat.mtime };
|
||||
})
|
||||
);
|
||||
|
||||
fileStats.sort((a, b) => b.mtime.getTime() - a.mtime.getTime());
|
||||
|
||||
const latestFile = fileStats[0].file;
|
||||
const newFilePath = path.resolve('./generated', baseFileName);
|
||||
|
||||
await fs.mkdir('./generated', { recursive: true });
|
||||
|
||||
const sourcePath = path.join(COMFY_OUTPUT_DIR!, latestFile);
|
||||
try {
|
||||
await fs.unlink(newFilePath);
|
||||
} catch (err) {
|
||||
// ignore if not exists
|
||||
}
|
||||
|
||||
await fs.copyFile(sourcePath, newFilePath);
|
||||
|
||||
return newFilePath;
|
||||
}
|
||||
|
||||
|
||||
export { facerestore_upscale, upscale };
|
||||
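Every helper in this file (and in image-converter.ts above) repeats the same submit-and-poll loop against the ComfyUI HTTP API; a factored-out version of that pattern would look roughly like this (a hypothetical refactor shown for illustration, not code introduced by this change):

import axios from 'axios';

// Queue a workflow on ComfyUI and block until its history entry reports outputs.
async function runWorkflowAndWait(comfyBaseUrl: string, workflow: unknown): Promise<any> {
  const baseUrl = comfyBaseUrl.replace(/\/$/, '');
  const response = await axios.post(`${baseUrl}/prompt`, { prompt: workflow });
  const promptId = response.data.prompt_id;

  let history;
  do {
    await new Promise(resolve => setTimeout(resolve, 1000));
    const historyResponse = await axios.get(`${baseUrl}/history/${promptId}`);
    history = historyResponse.data[promptId];
  } while (!history || Object.keys(history.outputs).length === 0);

  return history;
}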
@@ -36,6 +36,81 @@ export async function getPinUrlFromPinterest(keyword: string): Promise<string |
|
||||
}
|
||||
}
|
||||
|
||||
export async function downloadImagesFromPinterestSearch(keyword: string, count: number): Promise<string[]> {
|
||||
const browser = await puppeteer.launch({ headless: false });
|
||||
const page = await browser.newPage();
|
||||
await page.setUserAgent('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36');
|
||||
await page.setViewport({ width: 1920, height: 1080 });
|
||||
try {
|
||||
const searchUrl = `https://www.pinterest.com/search/pins/?q=${encodeURIComponent(keyword)}`;
|
||||
await page.goto(searchUrl, { waitUntil: 'networkidle2' });
|
||||
|
||||
logger.info(`Scrolling 3 times...`);
|
||||
for (let i = 0; i < 3; i++) {
|
||||
await page.evaluate('window.scrollTo(0, document.body.scrollHeight)');
|
||||
await new Promise(resolve => setTimeout(resolve, Math.random() * 1000 + 1000));
|
||||
}
|
||||
|
||||
const imageUrls = await page.$$eval('img', (imgs) => {
|
||||
const urls: string[] = imgs.map(img => {
|
||||
const srcset = img.getAttribute('srcset') || '';
|
||||
if (!srcset) return '';
|
||||
const parts = srcset.split(',').map(p => p.trim());
|
||||
for (const part of parts) {
|
||||
const m = part.match(/^(\S+)\s+4x$/);
|
||||
if (m && m[1]) return m[1];
|
||||
}
|
||||
const src = img.src || '';
|
||||
if (src.includes('/originals/')) return src;
|
||||
return '';
|
||||
}).filter(s => !!s && s.includes('pinimg'));
|
||||
// Remove duplicates
|
||||
return [...new Set(urls)];
|
||||
});
|
||||
|
||||
if (imageUrls.length === 0) {
|
||||
logger.warn(`No 4x image URLs found for keyword "${keyword}"`);
|
||||
return [];
|
||||
}
|
||||
|
||||
// shuffle and pick up to `count` unique images
|
||||
const shuffled = imageUrls.slice().sort(() => 0.5 - Math.random());
|
||||
const chosen = shuffled.slice(0, Math.min(count, shuffled.length));
|
||||
|
||||
const outDir = path.join(process.cwd(), 'download');
|
||||
await fs.mkdir(outDir, { recursive: true });
|
||||
|
||||
const results: string[] = [];
|
||||
for (let i = 0; i < chosen.length; i++) {
|
||||
const src = chosen[i];
|
||||
try {
|
||||
const imgPage = await browser.newPage();
|
||||
const resp = await imgPage.goto(src, { timeout: 30000, waitUntil: 'networkidle2' });
|
||||
if (!resp) {
|
||||
logger.warn(`Failed to fetch image ${src}`);
|
||||
await imgPage.close();
|
||||
continue;
|
||||
}
|
||||
const buffer = await resp.buffer();
|
||||
const timestamp = Date.now();
|
||||
const outPath = path.join(outDir, `${keyword.replace(/\s+/g, '_')}_${timestamp}_${i}.png`);
|
||||
await fs.writeFile(outPath, buffer);
|
||||
results.push(outPath);
|
||||
await imgPage.close();
|
||||
} catch (err) {
|
||||
logger.error(`Failed to download image ${src}:`, err);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
|
||||
} catch (error) {
|
||||
logger.error(`Error while downloading images for keyword "${keyword}":`, error);
|
||||
return [];
|
||||
} finally {
|
||||
await browser.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Download up to `count` images from a pin URL by opening the pin page and scrolling up to 5 times to trigger lazy loading
|
||||
// Returns an array of saved image paths (may be empty)
|
||||
|
||||
67
src/lib/util.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
// png-json-metadata.ts
|
||||
import * as fs from "fs";
|
||||
import extract from "png-chunks-extract";
|
||||
import encodeChunks from "png-chunks-encode";
|
||||
import * as textChunk from "png-chunk-text";
|
||||
|
||||
type PngChunk = { name: string; data: Uint8Array };
|
||||
|
||||
/**
|
||||
 * Embed JSON into a PNG as Base64 (tEXt chunk / keyword: "json-b64")
|
||||
 * - The JSON is UTF-8 encoded, then Base64 encoded to stay ASCII (avoids the Latin-1 limit of tEXt)
|
||||
 * - Any existing "json-b64" tEXt chunk is replaced (avoids duplicates)
|
||||
*/
|
||||
export async function embedJsonToPng(path: string, obj: unknown): Promise<void> {
|
||||
const input = fs.readFileSync(path);
|
||||
const chunks = extract(input) as PngChunk[];
|
||||
|
||||
// Drop any existing "json-b64" tEXt chunk
|
||||
const filtered: PngChunk[] = chunks.filter((c) => {
|
||||
if (c.name !== "tEXt") return true;
|
||||
try {
|
||||
const decoded = textChunk.decode(c.data); // { keyword, text }
|
||||
return decoded.keyword !== "json-b64";
|
||||
} catch {
|
||||
// Keep chunks that fail to decode (e.g. a different format)
|
||||
return true;
|
||||
}
|
||||
});
|
||||
|
||||
const json = JSON.stringify(obj);
|
||||
const b64 = Buffer.from(json, "utf8").toString("base64"); // ASCII only
|
||||
|
||||
// encode() returns { name: 'tEXt', data: Uint8Array }
|
||||
const newChunk = textChunk.encode("json-b64", b64) as PngChunk;
|
||||
|
||||
// Insert just before IEND (keeps the chunks in valid PNG order)
|
||||
const iendIndex = filtered.findIndex((c) => c.name === "IEND");
|
||||
if (iendIndex < 0) {
|
||||
throw new Error("Invalid PNG: missing IEND chunk.");
|
||||
}
|
||||
filtered.splice(iendIndex, 0, newChunk);
|
||||
|
||||
const out = Buffer.from(encodeChunks(filtered));
|
||||
fs.writeFileSync(path, out);
|
||||
}
|
||||
|
||||
/**
|
||||
 * Read the Base64-encoded JSON (tEXt / keyword: "json-b64") back out of a PNG
|
||||
*/
|
||||
export async function readJsonToPng(path: string): Promise<any> {
|
||||
const input = fs.readFileSync(path);
|
||||
const chunks = extract(input) as PngChunk[];
|
||||
|
||||
for (const c of chunks) {
|
||||
if (c.name !== "tEXt") continue;
|
||||
try {
|
||||
const { keyword, text } = textChunk.decode(c.data);
|
||||
if (keyword === "json-b64") {
|
||||
const json = Buffer.from(text, "base64").toString("utf8");
|
||||
return JSON.parse(json);
|
||||
}
|
||||
} catch {
|
||||
// Ignore other tEXt entries and broken ones
|
||||
}
|
||||
}
|
||||
throw new Error("No base64 JSON found in PNG (tEXt keyword 'json-b64').");
|
||||
}
|
||||
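A quick round-trip sketch for embedJsonToPng / readJsonToPng (illustrative only; the PNG path and payload shape are assumptions based on generate_prompt.ts further down):

import { embedJsonToPng, readJsonToPng } from './lib/util';

async function roundTrip() {
  const pngPath = './generated/prompts/cleaned_prompt_0.png'; // hypothetical file produced by generate_prompt.ts
  await embedJsonToPng(pngPath, { prompts: [{ imagePrompt: 'scene description', videoPrompt: 'camera push-in' }] });

  const meta = await readJsonToPng(pngPath); // throws if no "json-b64" tEXt chunk is found
  console.log(meta.prompts.length); // -> 1
}

roundTrip().catch(console.error);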
@@ -13,8 +13,8 @@ const PINS_TO_COLLECT = 5;
|
||||
|
||||
// Hard-coded user prompt
|
||||
const HARDCODED_USER_PROMPT = process.env.HARDCODED_USER_PROMPT || `
|
||||
Generate 20 keywords for photos of a ghotst or monster from all over the world. "Cute Japanese yokai" is mandatory, also add "Realistic photo cute" keyword to all genearated keywords first.
|
||||
Example output : ["Cute Japanese yokai","Realistic photo Cute ghost","Realistic photo cute monster","Realistic photo cute haloween monster","Realistic photo cute haloween ghost"... and 20 items in array]
|
||||
Generate 20 keywords for perfume brand photo. List of 20 most famous perfume brands, and its popular perfume names:
|
||||
Example output : ["chanel N5", "dior j'adore", "gucci bloom"....]
|
||||
`;
|
||||
|
||||
async function getPinUrlsFromPinterest(keyword: string, scrollCount = SCROLL_SEARCH, limit = PINS_TO_COLLECT): Promise<string[]> {
|
||||
|
||||
File diff suppressed because it is too large
75
src/product/clean_background.ts
Normal file
@@ -0,0 +1,75 @@
|
||||
import { convertImage } from '../lib/image-converter';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import dotenv from 'dotenv';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const inputDir = path.join(__dirname, '../../input');
|
||||
const outputDir = path.join(__dirname, '../../generated/cleaned');
|
||||
|
||||
const comfyUrl = process.env.SERVER1_COMFY_BASE_URL;
|
||||
const comfyOutputDir = process.env.SERVER1_COMFY_OUTPUT_DIR;
|
||||
|
||||
if (!comfyUrl || !comfyOutputDir) {
|
||||
console.error("ComfyUI URL or Output Directory is not set in environment variables.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const comfyInputDir = comfyOutputDir.replace("output", "input");
|
||||
|
||||
async function processImages() {
|
||||
await fs.ensureDir(outputDir);
|
||||
|
||||
const files = await fs.readdir(inputDir);
|
||||
let index = 1;
|
||||
|
||||
for (const file of files) {
|
||||
const sourceFilePath = path.join(inputDir, file);
|
||||
const stats = await fs.stat(sourceFilePath);
|
||||
|
||||
if (stats.isFile()) {
|
||||
console.log(`Processing ${file}...`);
|
||||
|
||||
const comfyInputPath = path.join(comfyInputDir, file);
|
||||
|
||||
try {
|
||||
// 1. Copy file to ComfyUI input directory
|
||||
await fs.copy(sourceFilePath, comfyInputPath);
|
||||
console.log(`Copied ${file} to ComfyUI input.`);
|
||||
|
||||
const prompt = "请从图1中提取主要主体,把背景设置为浅灰色,并让主体正面朝向,制作成产品照片。";
|
||||
|
||||
// 2. Call convertImage with correct parameters
|
||||
const generatedFilePath = await convertImage(prompt, file, comfyUrl!, comfyOutputDir!);
|
||||
|
||||
if (generatedFilePath && await fs.pathExists(generatedFilePath)) {
|
||||
const outputFilename = `cleaned_${index}.png`;
|
||||
const finalOutputPath = path.join(outputDir, outputFilename);
|
||||
|
||||
// 3. Move the generated file to the final destination
|
||||
await fs.move(generatedFilePath, finalOutputPath, { overwrite: true });
|
||||
console.log(`Saved cleaned image to ${finalOutputPath}`);
|
||||
index++;
|
||||
|
||||
// 4. Delete the original file from the script's input directory
|
||||
await fs.unlink(sourceFilePath);
|
||||
console.log(`Deleted original file: ${file}`);
|
||||
}
|
||||
|
||||
// 5. Clean up the file from ComfyUI input directory
|
||||
await fs.unlink(comfyInputPath);
|
||||
console.log(`Cleaned up ${file} from ComfyUI input.`);
|
||||
|
||||
} catch (error) {
|
||||
console.error(`Failed to process ${file}:`, error);
|
||||
// If something fails, make sure to clean up the copied file if it exists
|
||||
if (await fs.pathExists(comfyInputPath)) {
|
||||
await fs.unlink(comfyInputPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
processImages().catch(console.error);
|
||||
88
src/product/generate_image.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import dotenv from 'dotenv';
|
||||
import { readJsonToPng, embedJsonToPng } from '../lib/util';
|
||||
import { convertImage } from '../lib/image-converter';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const inputDir = './generated/prompts';
|
||||
const outputDir = './generated/image';
|
||||
|
||||
const COMFY_BASE_URL = process.env.SERVER1_COMFY_BASE_URL!;
|
||||
const COMFY_OUTPUT_DIR = process.env.SERVER1_COMFY_OUTPUT_DIR!;
|
||||
|
||||
interface PngMetadata {
|
||||
prompts: {
|
||||
imagePrompt: string;
|
||||
videoPrompt: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
async function main() {
|
||||
await fs.mkdir(outputDir, { recursive: true });
|
||||
|
||||
const files = await fs.readdir(inputDir);
|
||||
let generatedImageIndex = 0;
|
||||
|
||||
for (const file of files) {
|
||||
if (path.extname(file).toLowerCase() !== '.png') {
|
||||
continue;
|
||||
}
|
||||
|
||||
const inputFile = path.join(inputDir, file);
|
||||
const metadata = await readJsonToPng(inputFile) as PngMetadata;
|
||||
|
||||
if (metadata && metadata.prompts && Array.isArray(metadata.prompts)) {
|
||||
console.log(`Processing ${file} with ${metadata.prompts.length} prompt pairs.`);
|
||||
|
||||
const inputfolderFullpath = COMFY_OUTPUT_DIR.replace("output", "input");
|
||||
await fs.copyFile(inputFile, path.join(inputfolderFullpath, file));
|
||||
|
||||
for (const promptPair of metadata.prompts) {
|
||||
const { imagePrompt, videoPrompt } = promptPair;
|
||||
const newFileName = `cleaned_prompt_generated_${generatedImageIndex}.png`;
|
||||
generatedImageIndex++;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
try {
|
||||
await fs.access(outputPath);
|
||||
console.log(`File ${newFileName} already exists, skipping.`);
|
||||
continue;
|
||||
} catch (error) {
|
||||
// File does not exist, proceed with generation
|
||||
}
|
||||
|
||||
console.log(`Generating image for prompt: "${imagePrompt}"`);
|
||||
|
||||
try {
|
||||
const generatedFilePath = await convertImage(
|
||||
imagePrompt,
|
||||
file, // Using the same image for both inputs as per interpretation
|
||||
COMFY_BASE_URL,
|
||||
COMFY_OUTPUT_DIR
|
||||
);
|
||||
|
||||
// The convertImage function saves the file in a generic location.
|
||||
// We need to move it to the correct location with the correct name.
|
||||
await fs.rename(generatedFilePath, outputPath);
|
||||
|
||||
const newMetadata = {
|
||||
imagePrompt: imagePrompt,
|
||||
videoPrompt: videoPrompt
|
||||
};
|
||||
|
||||
await embedJsonToPng(outputPath, newMetadata);
|
||||
|
||||
console.log(`Successfully generated and saved ${newFileName} with metadata.`);
|
||||
} catch (error) {
|
||||
console.error(`Error generating image for prompt "${imagePrompt}":`, error);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
console.log(`Skipping ${file}, no valid prompts metadata found.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
169
src/product/generate_prompt.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { callLMStudioAPIWithFile, callLmstudio } from '../lib/lmstudio';
|
||||
import { embedJsonToPng } from '../lib/util';
|
||||
import { downloadImagesFromPinterestSearch } from '../lib/pinterest';
|
||||
import { logger } from '../lib/logger';
|
||||
import sharp from 'sharp';
|
||||
|
||||
const INPUT_DIR = path.join(process.cwd(), 'input');
|
||||
const OUTPUT_DIR = path.join(process.cwd(), 'generated', 'prompts');
|
||||
|
||||
if (!fs.existsSync(OUTPUT_DIR)) {
|
||||
fs.mkdirSync(OUTPUT_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
async function generatePromptsForImage(imagePath: string, index: number) {
|
||||
const outputFilePath = path.join(OUTPUT_DIR, `cleaned_prompt_${index}.png`);
|
||||
logger.info(`Processing image: ${path.basename(imagePath)} -> ${path.basename(outputFilePath)}`);
|
||||
|
||||
try {
|
||||
// Step 1: Detect main object and generate colors from the input image
|
||||
const colorGenerationPrompt = `
|
||||
You are a creative assistant. Analyze the provided image.
|
||||
Identify the main subject product (not a product name).
|
||||
Then, list exactly five colors related to this subject:
|
||||
- Two colors that are common for this object.
|
||||
- Two colors that are uncommon but plausible.
|
||||
- One color that is completely crazy or surreal for this object.
|
||||
|
||||
Output strictly in this JSON format:
|
||||
{
|
||||
"result": {
|
||||
"main_object": "the identified noun",
|
||||
"colors": [
|
||||
"color1",
|
||||
"color2",
|
||||
"color3",
|
||||
"color4",
|
||||
"color5"
|
||||
]
|
||||
}
|
||||
}
|
||||
`;
|
||||
const colorResponse = await callLMStudioAPIWithFile(imagePath, colorGenerationPrompt);
|
||||
const { main_object, colors } = colorResponse.result;
|
||||
|
||||
if (!main_object || !Array.isArray(colors) || colors.length !== 5) {
|
||||
logger.error(`Failed to get a valid main object and color list for ${imagePath}.`);
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info(`Main object: "${main_object}", Colors: ${colors.join(', ')}`);
|
||||
|
||||
const prompts: { imagePrompt: string, videoPrompt: string }[] = [];
|
||||
const themes = ["special", "unique", "beautiful", "crazy", "funny"];
|
||||
|
||||
// Step 2: Iterate through each color
|
||||
for (const color of colors) {
|
||||
const randomTheme = themes[Math.floor(Math.random() * themes.length)];
|
||||
const pinterestQuery = `${main_object} product photo ${color} background ${randomTheme}`;
|
||||
logger.info(`Searching Pinterest for: "${pinterestQuery}"`);
|
||||
|
||||
// Step 3: Get an image from Pinterest
|
||||
const downloadedImages = await downloadImagesFromPinterestSearch(pinterestQuery, 1);
|
||||
if (downloadedImages.length === 0) {
|
||||
logger.warn(`Could not find an image on Pinterest for query: "${pinterestQuery}"`);
|
||||
continue;
|
||||
}
|
||||
const pinterestImagePath = downloadedImages[0];
|
||||
logger.info(`Downloaded Pinterest image: ${pinterestImagePath}`);
|
||||
|
||||
// Step 4: Generate a detailed prompt from the Pinterest image
|
||||
const imagePromptRequest = `
|
||||
You are an expert in generating descriptive prompts for image generation models.
|
||||
Analyze the provided image and describe it in a single, detailed paragraph.
|
||||
Focus on style, mood, lighting, color palette, sub-objects, and composition.
|
||||
Do not mention the main object itself. The prompt should be about the scene.
|
||||
|
||||
Output strictly in this JSON format:
|
||||
{
|
||||
"result": "your generated prompt here"
|
||||
}
|
||||
`;
|
||||
const imagePromptResponse = await callLMStudioAPIWithFile(pinterestImagePath, imagePromptRequest);
|
||||
const imagePrompt = imagePromptResponse.result;
|
||||
|
||||
if (imagePrompt) {
|
||||
logger.info(`Generated image prompt for color ${color}: "${imagePrompt}"`);
|
||||
|
||||
// Step 5: Generate a matching video prompt
|
||||
const videoPromptRequest = `
|
||||
You are a creative director for a short, stylish video ad.
|
||||
Based on the provided image and the following scene description, generate an attractive video prompt.
|
||||
|
||||
Main Subject: ${main_object}
|
||||
Scene Description: ${imagePrompt}
|
||||
|
||||
The video prompt should:
|
||||
- Be in English and approximately 50 words.
|
||||
- Describe one clear action involving the main subject.
|
||||
- Include one specific camera movement (e.g., slow zoom in, orbiting shot, push-in, pull-out).
|
||||
- Be dynamic and visually appealing.
|
||||
|
||||
Output strictly in this JSON format:
|
||||
{
|
||||
"result": "your generated video prompt here"
|
||||
}
|
||||
`;
|
||||
const videoPromptResponse = await callLMStudioAPIWithFile(pinterestImagePath, videoPromptRequest);
|
||||
const videoPrompt = videoPromptResponse.result;
|
||||
|
||||
if (videoPrompt) {
|
||||
logger.info(`Generated video prompt for color ${color}: "${videoPrompt}"`);
|
||||
prompts.push({ imagePrompt, videoPrompt });
|
||||
} else {
|
||||
logger.warn(`Failed to generate a video prompt for ${pinterestImagePath}`);
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Failed to generate an image prompt for ${pinterestImagePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (prompts.length === 0) {
|
||||
logger.error(`No prompt pairs were generated for ${imagePath}. Aborting.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 6: Embed all prompts into the original image and save to the new location
|
||||
const metadata = {
|
||||
prompts: prompts
|
||||
};
|
||||
|
||||
// Convert original image to a valid PNG at the output path before embedding
|
||||
await sharp(imagePath)
|
||||
.toFormat('png')
|
||||
.toFile(outputFilePath);
|
||||
|
||||
await embedJsonToPng(outputFilePath, metadata);
|
||||
|
||||
logger.info(`Successfully generated prompts and saved metadata to ${outputFilePath}`);
|
||||
|
||||
} catch (error) {
|
||||
logger.error(`An error occurred while processing ${imagePath}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
const files = fs.readdirSync(INPUT_DIR);
|
||||
const imageFiles = files.filter(file => /\.(png|jpg|jpeg)$/i.test(file));
|
||||
|
||||
if (imageFiles.length === 0) {
|
||||
console.log('No images found in the input directory.');
|
||||
return;
|
||||
}
|
||||
|
||||
for (let i = 0; i < imageFiles.length; i++) {
|
||||
const imageFile = imageFiles[i];
|
||||
const imagePath = path.join(INPUT_DIR, imageFile);
|
||||
await generatePromptsForImage(imagePath, i);
|
||||
}
|
||||
|
||||
console.log('All images processed.');
|
||||
} catch (error) {
|
||||
console.error('An error occurred in the main process:', error);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
74
src/product/generate_video.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import * as fs from 'fs/promises';
|
||||
import * as path from 'path';
|
||||
import dotenv from 'dotenv';
|
||||
import { readJsonToPng } from '../lib/util';
|
||||
import { generateVideo } from '../lib/video-generator';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const inputDir = './input';
|
||||
const outputDir = './generated/video';
|
||||
|
||||
const COMFY_BASE_URL = process.env.SERVER1_COMFY_BASE_URL!;
|
||||
const COMFY_OUTPUT_DIR = process.env.SERVER1_COMFY_OUTPUT_DIR!;
|
||||
|
||||
|
||||
interface PngMetadata {
|
||||
imagePrompt: string;
|
||||
videoPrompt: string;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
await fs.mkdir(outputDir, { recursive: true });
|
||||
|
||||
const files = await fs.readdir(inputDir);
|
||||
const pngFiles = files.filter(file => path.extname(file).toLowerCase() === '.png');
|
||||
|
||||
for (let i = 0; i < pngFiles.length; i++) {
|
||||
const file = pngFiles[i];
|
||||
|
||||
const inputFile = path.join(inputDir, file);
|
||||
const metadata = await readJsonToPng(inputFile) as PngMetadata;
|
||||
|
||||
if (metadata && metadata.videoPrompt) {
|
||||
console.log(`Processing ${file} for video generation.`);
|
||||
|
||||
const originalFileName = path.parse(file).name;
|
||||
const nameParts = originalFileName.split('_');
|
||||
const promptIndex = nameParts[nameParts.length - 1];
|
||||
const newFileName = `product_${i}_${promptIndex}.mp4`;
|
||||
const outputPath = path.join(outputDir, newFileName);
|
||||
|
||||
try {
|
||||
await fs.access(outputPath);
|
||||
console.log(`File ${newFileName} already exists, skipping.`);
|
||||
continue;
|
||||
} catch (error) {
|
||||
// File does not exist, proceed with generation
|
||||
}
|
||||
|
||||
console.log(`Generating video for prompt: "${metadata.videoPrompt}"`);
|
||||
|
||||
const inputfolderFullpath = COMFY_OUTPUT_DIR.replace("output", "input");
|
||||
await fs.copyFile(inputFile, path.join(inputfolderFullpath, file));
|
||||
|
||||
try {
|
||||
await generateVideo(
|
||||
metadata.videoPrompt,
|
||||
file,
|
||||
newFileName,
|
||||
COMFY_BASE_URL,
|
||||
COMFY_OUTPUT_DIR
|
||||
);
|
||||
|
||||
console.log(`Successfully generated and saved ${newFileName}`);
|
||||
} catch (error) {
|
||||
console.error(`Error generating video for ${file}:`, error);
|
||||
}
|
||||
} else {
|
||||
console.log(`Skipping ${file}, no valid videoPrompt metadata found.`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(console.error);
|
||||
64
src/product/photo_download.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { callLmstudio } from '../lib/lmstudio';
|
||||
import { logger } from '../lib/logger';
|
||||
import dotenv from 'dotenv';
|
||||
import { downloadImagesFromPinterestSearch } from '../lib/pinterest';
|
||||
|
||||
dotenv.config();
|
||||
|
||||
const PHOTOS_PER_KEYWORD = 10;
|
||||
|
||||
// Hard-coded user prompt
|
||||
const HARDCODED_USER_PROMPT = process.env.HARDCODED_USER_PROMPT || `
|
||||
Generate 20 keywords for various photogenic products. List the 20 most common photogenic products:
|
||||
Example output : ["food", "perfume", "accesory", "jewelry", "shoes"...]
|
||||
`;
|
||||
|
||||
// Re-usable helper to extract JSON embedded in text
|
||||
function extractJsonFromText(text: string): any | null {
|
||||
if (!text || typeof text !== 'string') return null;
|
||||
const fenced = text.match(/```(?:json)?\s*([\s\S]*?)\s*```/i);
|
||||
if (fenced && fenced[1]) {
|
||||
try { return JSON.parse(fenced[1].trim()); } catch (e) { /* fall through */ }
|
||||
}
|
||||
const brace = text.match(/\{[\s\S]*\}|\[[\s\S]*\]/);
|
||||
if (brace && brace[0]) {
|
||||
try { return JSON.parse(brace[0]); } catch (e) { return null; }
|
||||
}
|
||||
// Attempt line-separated keywords fallback
|
||||
const lines = text.split(/\r?\n/).map((l: string) => l.trim()).filter(Boolean);
|
||||
if (lines.length > 1) return lines;
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
(async () => {
|
||||
logger.info(`Starting photo download process with prompt: "${HARDCODED_USER_PROMPT}"`);
|
||||
|
||||
// 1. Extract keywords from the hardcoded prompt
|
||||
const keywords = ["fullbody portrait girl", "fullbody portrait 18y girl", "fullbody portrait cute girl", "fullbody portrait blond girl", "fullbody portrait 20y girl"];
|
||||
|
||||
if (!keywords || keywords.length === 0) {
|
||||
logger.error("Could not extract keywords from prompt. Exiting.");
|
||||
return;
|
||||
}
|
||||
logger.info(`Extracted keywords: ${keywords.join(', ')}`);
|
||||
|
||||
// 2. Search Pinterest for each keyword and download photos directly
|
||||
let totalDownloads = 0;
|
||||
for (const keyword of keywords) {
|
||||
try {
|
||||
logger.info(`Downloading photos for keyword: "${keyword}"`);
|
||||
const downloadedPaths = await downloadImagesFromPinterestSearch(`${keyword}`, PHOTOS_PER_KEYWORD);
|
||||
if (downloadedPaths.length > 0) {
|
||||
logger.info(`Successfully downloaded ${downloadedPaths.length} images for "${keyword}"`);
|
||||
totalDownloads += downloadedPaths.length;
|
||||
} else {
|
||||
logger.warn(`No images were downloaded for "${keyword}"`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error(`An error occurred while processing keyword ${keyword}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Photo download process finished. Total images downloaded: ${totalDownloads}`);
|
||||
})();
|
||||
54
src/tools/embed_prompt_to_png.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import * as fs from 'fs';
import * as path from 'path';
import { callLMStudioAPIWithFile } from '../lib/lmstudio';
import { embedJsonToPng } from '../lib/util';

const imageDir = 'C:\\Users\\fm201\\Desktop\\vton\\bags';

async function processImages() {
  try {
    const files = fs.readdirSync(imageDir);
    const imageFiles = files.filter(file => /\.(png)$/i.test(file));

    for (const imageFile of imageFiles) {
      const imagePath = path.join(imageDir, imageFile);
      console.log(`Processing ${imagePath}...`);

      const prompt = `
Based on the handbag in the image, generate 10 outfit prompts that would complement it.
Each prompt should be a short, descriptive sentence of around 20 words.
Return the result in the following JSON format:
{"result": ["outfit prompt 1", "outfit prompt 2", ...]}
`;

      try {
        const response = await callLMStudioAPIWithFile(imagePath, prompt);
        let outfitPrompts;

        if (typeof response === 'string') {
          try {
            outfitPrompts = JSON.parse(response);
          } catch (e) {
            console.error(`Failed to parse JSON string for ${imageFile}:`, response);
            continue;
          }
        } else {
          outfitPrompts = response;
        }

        if (outfitPrompts && outfitPrompts.result) {
          await embedJsonToPng(imagePath, outfitPrompts);
          console.log(`Successfully embedded prompts into ${imageFile}`);
        } else {
          console.error(`Invalid JSON response for ${imageFile}:`, response);
        }
      } catch (error) {
        console.error(`Failed to process ${imageFile}:`, error);
      }
    }
  } catch (error) {
    console.error('Error reading image directory:', error);
  }
}

processImages();
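A rough sketch of how the embedded prompts could be read back for verification, using the png-chunks-extract and png-chunk-text packages. This is illustrative only: the tEXt chunk keyword ('json') and the sample file path are assumptions, since embedJsonToPng's internals are not shown in this diff.

// verify_embedded_prompts.ts (illustrative sketch, assumes esModuleInterop)
import * as fs from 'fs';
import extract from 'png-chunks-extract';
import pngText from 'png-chunk-text';

function readEmbeddedJson(pngPath: string): unknown {
  // Parse all chunks of the PNG and look for tEXt entries
  const chunks = extract(fs.readFileSync(pngPath));
  for (const chunk of chunks) {
    if (chunk.name !== 'tEXt') continue;
    const { keyword, text } = pngText.decode(chunk.data);
    if (keyword === 'json') { // assumed keyword used by embedJsonToPng
      try { return JSON.parse(text); } catch { return null; }
    }
  }
  return null;
}

// Hypothetical file name, for illustration only
console.log(readEmbeddedJson('C:\\Users\\fm201\\Desktop\\vton\\bags\\example.png'));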
@ -2,35 +2,31 @@ import * as fs from 'fs';
import * as path from 'path';
import { convertImageVton, convertImage } from '../lib/image-converter';
import * as dotenv from 'dotenv';
import sharp from 'sharp';

dotenv.config();

<<<<<<< HEAD
const modelsBodyDir = 'D:\\CatsEye\\long videos\\vton-demo\\VTON\\models_body';
const clothesDir = 'D:\\CatsEye\\long videos\\vton-demo\\VTON\\clothes';
const posesDir = 'D:\\CatsEye\\long videos\\vton-demo\\VTON\\poses';
=======
const clothesDir = 'C:\\Users\\fm201\\Documents\\VTON\\\clothes';
const modelPath = 'C:\\Users\\fm201\\Documents\\VTON\\models\\Jessica_body.png';
const posesDir = 'C:\\Users\\fm201\\Documents\\VTON\\\poses';
>>>>>>> bdca42e82102a00f771ecf58b4ff0673dbd218af
const clothesDir = 'D:\\projects\\random_video_maker\\input';
const outputDir = 'generated';

const comfyBaseUrl = process.env.SERVER2_COMFY_BASE_URL;
const comfyOutputDir = process.env.SERVER2_COMFY_OUTPUT_DIR;
const comfyBaseUrl = process.env.SERVER1_COMFY_BASE_URL;
const comfyOutputDir = process.env.SERVER1_COMFY_OUTPUT_DIR;

function getNextIndex(directory: string): number {
  if (!fs.existsSync(directory)) {
    fs.mkdirSync(directory, { recursive: true });
    return 0;
  }
  const files = fs.readdirSync(directory);
  const vtonFiles = files.filter(file => file.startsWith('vton_') && file.endsWith('.png'));
  if (vtonFiles.length === 0) {
    const dirs = fs.readdirSync(directory, { withFileTypes: true })
      .filter(dirent => dirent.isDirectory())
      .map(dirent => dirent.name);

    const vtonDirs = dirs.filter(dir => dir.startsWith('vton_'));
    if (vtonDirs.length === 0) {
      return 0;
    }
    const indices = vtonFiles.map(file => {
      const match = file.match(/vton_(\d+)\.png/);
    const indices = vtonDirs.map(dir => {
      const match = dir.match(/vton_(\d+)/);
      return match ? parseInt(match[1], 10) : -1;
    });
    return Math.max(...indices) + 1;
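    // Example (based on the added vton_* directory logic above): with generated/vton_0 and
    // generated/vton_3 on disk and no vton_*.png files, getNextIndex('generated') returns 4;
    // a missing or empty directory returns 0.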
@ -55,27 +51,96 @@ async function generateVtonImages() {
  const comfyInputDir = comfyOutputDir.replace("output", "input");

  while (true) { // Infinite loop
    const iterationDir = path.join(outputDir, `vton_${index}`);
    fs.mkdirSync(iterationDir, { recursive: true });

    try {
      const personFilePath = getRandomFile(modelsBodyDir);
      const clothFilePath = getRandomFile(clothesDir);
      const poseFilePath = getRandomFile(posesDir);
      const personOrigPath = getRandomFile(clothesDir);
      const clothOrigPath = getRandomFile(clothesDir);

      const personFileName = path.basename(personFilePath);
      const clothFileName = path.basename(clothFilePath);
      const poseFileName = path.basename(poseFilePath);
      fs.copyFileSync(personOrigPath, path.join(iterationDir, '1-personOrig.png'));
      fs.copyFileSync(clothOrigPath, path.join(iterationDir, '3-clothOrig.png'));

      fs.copyFileSync(personFilePath, path.join(comfyInputDir, personFileName));
      fs.copyFileSync(clothFilePath, path.join(comfyInputDir, clothFileName));
      fs.copyFileSync(poseFilePath, path.join(comfyInputDir, poseFileName));
      const personOrigFileName = path.basename(personOrigPath);
      const clothOrigFileName = path.basename(clothOrigPath);

      console.log(`Processing person: ${personFileName}, cloth: ${clothFileName}, pose: ${poseFileName}`);
      fs.copyFileSync(personOrigPath, path.join(comfyInputDir, personOrigFileName));
      fs.copyFileSync(clothOrigPath, path.join(comfyInputDir, clothOrigFileName));

      const outputFilename = `vton_${index}.png`;
      console.log(`Processing person: ${personOrigPath}, cloth: ${clothOrigPath}`);

      const generatedImagePath = await convertImageVton(personFileName, clothFileName, poseFileName, outputFilename, comfyBaseUrl, comfyOutputDir, { width: 720, height: 1280 });
      // Prompt (zh): "Change the pose to standing and make it a full-body photo. Remove the clothes, keeping only a white sports bra and white shorts. Keep both feet bare. Light gray background."
      const cleanePersonImagePath = await convertImage("请把姿势改成站立的,转换成全身照片。去掉衣服,只保留白色运动文胸和白色短裤。双脚保持赤脚。背景为浅灰色。", personOrigFileName, comfyBaseUrl, comfyOutputDir, { width: 720, height: 1280 });
      fs.copyFileSync(cleanePersonImagePath, path.join(iterationDir, '2-personCleaned.png'));
      const cleanedPersonFileName = path.basename(cleanePersonImagePath);
      fs.copyFileSync(cleanePersonImagePath, path.join(comfyInputDir, cleanedPersonFileName));

      // Prompt (zh): "Extract the top, bottoms, and accessories from image 1 separately and place them on the same light gray background."
      const cleanedClothImagePath = await convertImage("请将图1中的上衣、下装和配饰分别提取出来,放到同一个浅灰色的背景上。", clothOrigFileName, comfyBaseUrl, comfyOutputDir, { width: 720, height: 1280 });
      fs.copyFileSync(cleanedClothImagePath, path.join(iterationDir, '4-clothCleaned.png'));
      const cleanedClothFileName = path.basename(cleanedClothImagePath);
      fs.copyFileSync(cleanedClothImagePath, path.join(comfyInputDir, cleanedClothFileName));

      const outputFilename = `vton_final_${index}.png`;
      const generatedImagePath = await convertImageVton(cleanedPersonFileName, cleanedClothFileName, outputFilename, comfyBaseUrl, comfyOutputDir, { width: 720, height: 1280 });

      if (generatedImagePath) {
        fs.copyFileSync(generatedImagePath, path.join(iterationDir, '5-finalResult.png'));
        console.log(`Generated image saved to ${generatedImagePath}`);

        // --- Create composite image ---
        const imagePaths = [
          path.join(iterationDir, '1-personOrig.png'),
          path.join(iterationDir, '3-clothOrig.png'),
          path.join(iterationDir, '2-personCleaned.png'),
          path.join(iterationDir, '4-clothCleaned.png'),
          path.join(iterationDir, '5-finalResult.png')
        ];

        const resizedImages = [];
        let totalWidth = 10; // Initial left margin
        const resizedHeight = 720;

        for (const imagePath of imagePaths) {
          const image = sharp(imagePath);
          const metadata = await image.metadata();
          if (!metadata.width || !metadata.height) {
            throw new Error(`Could not get metadata for image ${imagePath}`);
          }
          const resizedWidth = Math.round((metadata.width / metadata.height) * resizedHeight);

          const resizedImageBuffer = await image.resize(resizedWidth, resizedHeight).toBuffer();
          resizedImages.push({
            buffer: resizedImageBuffer,
            width: resizedWidth
          });
          totalWidth += resizedWidth + 10; // Add image width and right margin
        }

        const compositeOps = [];
        let currentLeft = 10; // Start with left margin

        for (const img of resizedImages) {
          compositeOps.push({
            input: img.buffer,
            top: 10, // 10px top margin
            left: currentLeft
          });
          currentLeft += img.width + 10; // Move to the next position
        }

        await sharp({
          create: {
            width: totalWidth,
            height: 740,
            channels: 4,
            background: { r: 255, g: 255, b: 255, alpha: 1 }
          }
        })
          .composite(compositeOps)
          .toFile(path.join(iterationDir, 'process.png'));
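        // Layout math for process.png: totalWidth = 10px left gutter + (resizedWidth + 10px) per image,
        // and the 740px canvas height is the 720px resized image height plus 10px top and bottom margins.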
        console.log(`Generated composite image process.png in ${iterationDir}`);
        // --- End of composite image creation ---

        index++;
      } else {
        console.error(`Failed to generate image for index ${index}`);
538
src/vton/handback_portrait.ts
Normal file
@ -0,0 +1,538 @@
import * as fs from 'fs/promises';
import * as path from 'path';
import dotenv from 'dotenv';
import { downloadImagesFromPinterestSearch } from '../lib/pinterest';
import { convertImage, convertImageWithFile, convertImageWithFileForPose, convertImageWithFileHandbag } from '../lib/image-converter';
import { logger } from '../lib/logger';
import { callLmstudio, callLMStudioAPIWithFile } from '../lib/lmstudio';
import { upscale } from '../lib/image-upscaler';

dotenv.config();

const SERVER1_COMFY_BASE_URL = process.env.SERVER1_COMFY_BASE_URL!;
const SERVER1_COMFY_OUTPUT_DIR = process.env.SERVER1_COMFY_OUTPUT_DIR!;
const imageSize: { width: number; height: number } = { width: 1280, height: 720 };


async function upscaleAndFix(
  baseImage: string,
  faceImage: string,
  outputFilename: string,
  outputDir: string,
): Promise<void> {
  try {
    // Copy both images to ComfyUI input directory
    // (assumes ComfyUI's input folder sits next to its output folder)
    const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
    await fs.mkdir(inputFolderFullPath, { recursive: true });

    const baseFilePath = path.join(outputDir, baseImage);
    const referenceFilePath = path.join(outputDir, faceImage);

    const baseFileName = path.basename(baseImage);
    const referenceFileName = path.basename(faceImage);

    const inputBasePath = path.join(inputFolderFullPath, baseFileName);
    const inputReferencePath = path.join(inputFolderFullPath, referenceFileName);

    logger.info(`Copying base image to ComfyUI input: ${inputBasePath}`);
    await fs.copyFile(baseFilePath, inputBasePath);

    logger.info(`Copying reference image to ComfyUI input: ${inputReferencePath}`);
    await fs.copyFile(referenceFilePath, inputReferencePath);

    // Upscale the base image
    // Note: faceImage is staged in the input folder, but the upscale call below only uses the base image.
    logger.info(`Running upscale workflow...`);

    const convertedImagePath = await upscale(
      baseFileName,
      SERVER1_COMFY_BASE_URL,
      SERVER1_COMFY_OUTPUT_DIR,
    );

    logger.info(`Converted image: ${convertedImagePath}`);

    // Copy the converted image to final destination
    const finalOutputPath = path.join(outputDir, outputFilename);
    logger.info(`Copying to final destination: ${finalOutputPath}`);
    await fs.copyFile(convertedImagePath, finalOutputPath);

    logger.info(`✓ Successfully generated: ${finalOutputPath}`);

  } catch (error) {
    logger.error(`Error in upscaleAndFix:`, error);
    throw error;
  }
}

/**
 * Process a single image: download from Pinterest, convert with prompt, and save
 * @param keyword - Pinterest search keyword
 * @param prompt - Image conversion prompt
 * @param filename - Output filename
 * @param outputDir - Directory to save the generated file
 * @param shouldConvert - Whether to convert the image with prompt or just copy it
 */
async function processImage(
  keyword: string,
  prompt: string,
  filename: string,
  outputDir: string,
  shouldConvert: boolean = true
): Promise<void> {
  try {
    logger.info(`\n=== Processing: ${filename} ===`);
    logger.info(`Keyword: ${keyword}`);
    logger.info(`Should convert: ${shouldConvert}`);

    // Step 1: Download image from Pinterest
    logger.info(`Step 1: Downloading image from Pinterest with keyword: "${keyword}"...`);
    const downloadedImages = await downloadImagesFromPinterestSearch(keyword, 1);

    if (downloadedImages.length === 0) {
      logger.error(`Failed to download image for keyword: "${keyword}"`);
      return;
    }

    const downloadedImagePath = downloadedImages[0];
    logger.info(`Downloaded image: ${downloadedImagePath}`);

    const finalOutputPath = path.join(outputDir, filename);

    if (shouldConvert) {
      logger.info(`Prompt: ${prompt}`);

      // Step 2: Copy image to ComfyUI input directory
      const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
      await fs.mkdir(inputFolderFullPath, { recursive: true });

      const imageFileName = path.basename(downloadedImagePath);
      const inputImagePath = path.join(inputFolderFullPath, imageFileName);

      logger.info(`Step 2: Copying image to ComfyUI input folder: ${inputImagePath}`);
      await fs.copyFile(downloadedImagePath, inputImagePath);

      // Step 3: Convert image with prompt
      logger.info(`Step 3: Converting image with prompt...`);
      const convertedImagePath = await convertImage(
        prompt,
        imageFileName,
        SERVER1_COMFY_BASE_URL,
        SERVER1_COMFY_OUTPUT_DIR,
        imageSize
      );
      logger.info(`Converted image: ${convertedImagePath}`);

      // Step 4: Copy the converted image to final destination
      logger.info(`Step 4: Copying to final destination: ${finalOutputPath}`);
      await fs.copyFile(convertedImagePath, finalOutputPath);
    } else {
      // Just copy the downloaded image directly to the output directory with the specified filename
      logger.info(`Step 2: Copying directly to final destination: ${finalOutputPath}`);
      await fs.copyFile(downloadedImagePath, finalOutputPath);
    }

    logger.info(`✓ Successfully generated: ${finalOutputPath}`);

  } catch (error) {
    logger.error(`Error processing image for keyword "${keyword}":`, error);
    throw error;
  }
}
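// Usage sketch (illustrative placeholders; the real calls appear in processIteration below):
//   await processImage('handbag single product photography', 'extract the bag onto a gray background',
//                      'handbag_123.png', './generated/vton_123', true);
//   await processImage('cute girl face high resolution', '', 'model_123.png', './generated/vton_123', false);
// With shouldConvert=false the Pinterest download is copied as-is; with true it is routed through ComfyUI first.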

/**
 * Convert an existing image with a prompt
 * @param prompt - Image conversion prompt
 * @param imagePath - Path to the existing image
 * @param outputFilename - Output filename
 * @param outputDir - Directory to save the converted file
 */
async function convertImageWithPrompt(
  prompt: string,
  imagePath: string,
  outputFilename: string,
  outputDir: string
): Promise<void> {
  try {
    logger.info(`\n=== Converting Image: ${outputFilename} ===`);
    logger.info(`Source: ${imagePath}`);
    logger.info(`Prompt: ${prompt}`);

    // Step 1: Copy image to ComfyUI input directory
    const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
    await fs.mkdir(inputFolderFullPath, { recursive: true });

    const imageFileName = path.basename(imagePath);
    const inputImagePath = path.join(inputFolderFullPath, imageFileName);

    logger.info(`Step 1: Copying image to ComfyUI input folder: ${inputImagePath}`);
    await fs.copyFile(imagePath, inputImagePath);

    // Step 2: Convert image with prompt
    logger.info(`Step 2: Converting image with prompt...`);
    const convertedImagePath = await convertImage(
      prompt,
      imageFileName,
      SERVER1_COMFY_BASE_URL,
      SERVER1_COMFY_OUTPUT_DIR,
      imageSize
    );
    logger.info(`Converted image: ${convertedImagePath}`);

    // Step 3: Copy the converted image to final destination
    const finalOutputPath = path.join(outputDir, outputFilename);
    logger.info(`Step 3: Copying to final destination: ${finalOutputPath}`);
    await fs.copyFile(convertedImagePath, finalOutputPath);

    logger.info(`✓ Successfully converted: ${finalOutputPath}`);

  } catch (error) {
    logger.error(`Error converting image:`, error);
    throw error;
  }
}
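// Usage sketch (illustrative; this helper is exported but not called elsewhere in this file,
// and the prompt text and file names below are placeholders):
//   await convertImageWithPrompt('convert to a watercolor style', './generated/vton_123/model_123.png',
//                                'model_123_watercolor.png', './generated/vton_123');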

/**
 * Process two images together: combine base image with reference image using prompt
 * @param prompt - Processing prompt
 * @param baseFile - Base image filename (in generated folder)
 * @param referenceFile - Reference image filename (in generated folder)
 * @param outputFilename - Output filename
 * @param outputDir - Directory to save the generated file
 * @param isPose - Use the pose-transfer workflow instead of the default two-image workflow
 */
async function processTwoImages(
  prompt: string,
  baseFile: string,
  referenceFile: string,
  outputFilename: string,
  outputDir: string,
  isPose: boolean = false
): Promise<void> {
  try {
    logger.info(`\n=== Processing: ${outputFilename} ===`);
    logger.info(`Base: ${baseFile}, Reference: ${referenceFile}`);
    logger.info(`Prompt: ${prompt}`);

    // Copy both images to ComfyUI input directory
    const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
    await fs.mkdir(inputFolderFullPath, { recursive: true });

    const baseFilePath = path.join(outputDir, baseFile);
    const referenceFilePath = path.join(outputDir, referenceFile);

    const baseFileName = path.basename(baseFile);
    const referenceFileName = path.basename(referenceFile);

    const inputBasePath = path.join(inputFolderFullPath, baseFileName);
    const inputReferencePath = path.join(inputFolderFullPath, referenceFileName);

    logger.info(`Copying base image to ComfyUI input: ${inputBasePath}`);
    await fs.copyFile(baseFilePath, inputBasePath);

    logger.info(`Copying reference image to ComfyUI input: ${inputReferencePath}`);
    await fs.copyFile(referenceFilePath, inputReferencePath);

    // Convert images with prompt
    logger.info(`Processing images with convertImageWithFile...`);

    const convertedImagePath = isPose ? await convertImageWithFileForPose(
      prompt,
      baseFileName,
      referenceFileName,
      SERVER1_COMFY_BASE_URL,
      SERVER1_COMFY_OUTPUT_DIR,
      imageSize
    ) : await convertImageWithFile(
      prompt,
      baseFileName,
      referenceFileName,
      SERVER1_COMFY_BASE_URL,
      SERVER1_COMFY_OUTPUT_DIR,
      imageSize
    );

    logger.info(`Converted image: ${convertedImagePath}`);

    // Copy the converted image to final destination
    const finalOutputPath = path.join(outputDir, outputFilename);
    logger.info(`Copying to final destination: ${finalOutputPath}`);
    await fs.copyFile(convertedImagePath, finalOutputPath);

    logger.info(`✓ Successfully generated: ${finalOutputPath}`);

  } catch (error) {
    logger.error(`Error processing two images:`, error);
    throw error;
  }
}
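// Usage sketch (illustrative; see processIteration below for the real calls):
//   await processTwoImages('请将图1中模特的姿势更改为图2的姿势。', 'model_123.png', 'pose_123.png',
//                          'model_posed_123.png', './generated/vton_123', true);
// isPose=true routes through convertImageWithFileForPose; false uses convertImageWithFile.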


/**
 * Process two images together: combine the base image with a reference handbag image using prompt
 * @param prompt - Processing prompt
 * @param baseFile - Base image filename (in generated folder)
 * @param referenceFile - Reference handbag image filename (in generated folder)
 * @param outputFilename - Output filename
 * @param outputDir - Directory to save the generated file
 */
async function processTwoImagesHandbag(
  prompt: string,
  baseFile: string,
  referenceFile: string,
  outputFilename: string,
  outputDir: string,
): Promise<void> {
  try {
    logger.info(`\n=== Processing: ${outputFilename} ===`);
    logger.info(`Base: ${baseFile}, Reference: ${referenceFile}`);
    logger.info(`Prompt: ${prompt}`);

    // Copy both images to ComfyUI input directory
    const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
    await fs.mkdir(inputFolderFullPath, { recursive: true });

    const baseFilePath = path.join(outputDir, baseFile);
    const referenceFilePath = path.join(outputDir, referenceFile);

    const baseFileName = path.basename(baseFile);
    const referenceFileName = path.basename(referenceFile);

    const inputBasePath = path.join(inputFolderFullPath, baseFileName);
    const inputReferencePath = path.join(inputFolderFullPath, referenceFileName);

    logger.info(`Copying base image to ComfyUI input: ${inputBasePath}`);
    await fs.copyFile(baseFilePath, inputBasePath);

    logger.info(`Copying reference image to ComfyUI input: ${inputReferencePath}`);
    await fs.copyFile(referenceFilePath, inputReferencePath);

    // Convert images with prompt
    logger.info(`Processing images with convertImageWithFileHandbag...`);

    const convertedImagePath = await convertImageWithFileHandbag(
      prompt,
      baseFileName,
      referenceFileName,
      SERVER1_COMFY_BASE_URL,
      SERVER1_COMFY_OUTPUT_DIR,
      imageSize
    );

    logger.info(`Converted image: ${convertedImagePath}`);

    // Copy the converted image to final destination
    const finalOutputPath = path.join(outputDir, outputFilename);
    logger.info(`Copying to final destination: ${finalOutputPath}`);
    await fs.copyFile(convertedImagePath, finalOutputPath);

    logger.info(`✓ Successfully generated: ${finalOutputPath}`);

  } catch (error) {
    logger.error(`Error processing handbag images:`, error);
    throw error;
  }
}

/**
 * Process a complete iteration: download base images and apply sequential transformations
 */
async function processIteration(iteration: number): Promise<void> {
  try {
    const timestamp = Date.now();
    logger.info(`\n${'='.repeat(80)}`);
    logger.info(`ITERATION ${iteration} - Starting with timestamp: ${timestamp}`);
    logger.info(`${'='.repeat(80)}`);

    // Create output directory for this iteration
    const outputDir = path.join(process.cwd(), 'generated', `vton_${timestamp}`);
    await fs.mkdir(outputDir, { recursive: true });
    logger.info(`Output directory created: ${outputDir}`);

    // === PHASE 1: Download base images ===
    logger.info(`\n--- PHASE 1: Downloading base images ---`);

    await processImage(
      'cute girl face high resolution',
      '',
      `model_${timestamp}.png`,
      outputDir,
      false
    );

    await processImage(
      'woman elegant outfit fullbody single',
      '',
      `outfit_${timestamp}.png`,
      outputDir,
      false
    );

    await processImage(
      'photo elegant indoor room',
      '',
      `room_${timestamp}.png`,
      outputDir,
      false
    );

    // Prompt (zh): "Extract the bag from the photo and place it facing forward on a light gray background."
    await processImage(
      'handbag single product photography',
      '请提取照片中的包,并将其正面朝向地放置在亮灰色背景上。',
      `handbag_${timestamp}.png`,
      outputDir,
      true
    );

    await processImage(
      'woman portrait standing',
      '',
      `pose_${timestamp}.png`,
      outputDir,
      false
    );

    // === PHASE 2: Sequential transformations ===
    logger.info(`\n--- PHASE 2: Sequential transformations ---`);

    // Step 1: Generate outfit prompt using LMStudio API
    logger.info('Step 1: Generating outfit prompt with LMStudio API...');
    const outfitImagePath = path.join(outputDir, `outfit_${timestamp}.png`);
    const outfitPromptResponse = await callLMStudioAPIWithFile(
      outfitImagePath,
      'Describe this outfit in detail about 30 words. Focus on color and cloth type. Return the result in this format: {"result":""}'
    );
    const outfitPrompt = outfitPromptResponse.result || outfitPromptResponse;
    logger.info(`Generated outfit prompt: ${outfitPrompt}`);

    // Step 2: Generate location prompt using LMStudio API
    logger.info('Step 2: Generating location prompt with LMStudio API...');
    const roomImagePath = path.join(outputDir, `room_${timestamp}.png`);
    const locationPromptResponse = await callLMStudioAPIWithFile(
      roomImagePath,
      'Describe this location/room in detail about 30 words. Return the result in this format: {"result":""}'
    );
    const locationPrompt = locationPromptResponse.result || locationPromptResponse;
    logger.info(`Generated location prompt: ${locationPrompt}`);

    // Step 3: Generate Chinese prompt using LMStudio API
    logger.info('Step 3: Generating Chinese prompt for model transformation...');
    const chinesePromptRequest = `Generate a Chinese prompt for image transformation that describes:
- Prefix: generate a portrait photo of a woman in image1
- Use outfit to: ${outfitPrompt}
- Use location to: ${locationPrompt}

Return the result in this format: {"result":""}`;

    const chinesePromptResponse = await callLmstudio(chinesePromptRequest);
    const chinesePrompt = chinesePromptResponse.result || chinesePromptResponse;
    logger.info(`Generated Chinese prompt: ${chinesePrompt}`);

    // Process model with outfit and location using the Chinese prompt
    logger.info('Step 4: Processing model with outfit and location...');
    const modelImagePath = path.join(outputDir, `model_${timestamp}.png`);

    // Copy model image to ComfyUI input directory
    const inputFolderFullPath = SERVER1_COMFY_OUTPUT_DIR.replace('output', 'input');
    await fs.mkdir(inputFolderFullPath, { recursive: true });
    const modelFileName = path.basename(modelImagePath);
    const inputModelPath = path.join(inputFolderFullPath, modelFileName);
    await fs.copyFile(modelImagePath, inputModelPath);

    // Convert image with Chinese prompt and pose
    // Prompt prefix (zh): "Change the pose of the model in image 1 to the pose in image 2."
    await processTwoImages(
      `请将图1中模特的姿势更改为图2的姿势。, ${chinesePrompt}`,
      modelFileName,
      `pose_${timestamp}.png`,
      `model_outfit_location_pose_${timestamp}.png`,
      outputDir,
      true
    );

    // Step 5: Add handbag to model
    // Prompt (zh): "Modify the woman in image 1 so that she is holding the bag from image 2."
    await processTwoImagesHandbag(
      '请将图1中的女性修改成手持图2的包。',
      `model_outfit_location_pose_${timestamp}.png`,
      `handbag_${timestamp}.png`,
      `model_outfit_location_handbag1_${timestamp}.png`,
      outputDir
    );

    // Prompt (zh): "Make the woman in image 1 look as if she is holding the bag from image 2 in her hand."
    await processTwoImagesHandbag(
      '请让图1的女性看起来像是在手里拿着图2的包。',
      `model_outfit_location_pose_${timestamp}.png`,
      `handbag_${timestamp}.png`,
      `model_outfit_location_handbag2_${timestamp}.png`,
      outputDir
    );

    // Prompt (zh): "Modify the woman in image 1 so that she is holding the bag from image 2 with both hands."
    await processTwoImagesHandbag(
      '请将图1中的女性修改成双手拿着图2的包。',
      `model_outfit_location_pose_${timestamp}.png`,
      `handbag_${timestamp}.png`,
      `model_outfit_location_handbag3_${timestamp}.png`,
      outputDir
    );

    await upscaleAndFix(
      `model_outfit_location_handbag1_${timestamp}.png`,
      `model_${timestamp}.png`,
      `model_outfit_location_handbag1_upscaled_${timestamp}.png`,
      outputDir
    );

    await upscaleAndFix(
      `model_outfit_location_handbag2_${timestamp}.png`,
      `model_${timestamp}.png`,
      `model_outfit_location_handbag2_upscaled_${timestamp}.png`,
      outputDir
    );

    await upscaleAndFix(
      `model_outfit_location_handbag3_${timestamp}.png`,
      `model_${timestamp}.png`,
      `model_outfit_location_handbag3_upscaled_${timestamp}.png`,
      outputDir
    );

    logger.info(`\n${'='.repeat(80)}`);
    logger.info(`ITERATION ${iteration} COMPLETED!`);
    logger.info(`Generated files are saved in: ${outputDir}`);
    logger.info(`${'='.repeat(80)}\n`);

  } catch (error) {
    logger.error(`Error in iteration ${iteration}:`, error);
    throw error;
  }
}
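// Per-iteration output layout produced by processIteration (in generated/vton_<timestamp>/):
//   model_*.png, outfit_*.png, room_*.png, pose_*.png      - raw Pinterest downloads
//   handbag_*.png                                          - extracted handbag (via ComfyUI)
//   model_outfit_location_pose_*.png                       - model re-dressed, relocated and re-posed
//   model_outfit_location_handbag{1,2,3}_*.png             - three handbag-holding variants
//   model_outfit_location_handbag{1,2,3}_upscaled_*.png    - upscaled variants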

/**
 * Main execution function with infinite iteration
 */
async function main() {
  let iteration = 1;

  try {
    logger.info('Starting infinite processing loop...');
    logger.info('Press Ctrl+C to stop the process\n');

    while (true) {
      await processIteration(iteration);
      iteration++;

      // Small delay between iterations
      logger.info('Waiting 5 seconds before next iteration...\n');
      await new Promise(resolve => setTimeout(resolve, 5000));
    }
  } catch (error) {
    logger.error('Error in main execution:', error);
    process.exit(1);
  }
}

// Execute main function if this file is run directly
if (require.main === module) {
  main();
}
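// To run this script directly (assuming ts-node is available in the project):
//   npx ts-node src/vton/handback_portrait.ts
// SERVER1_COMFY_BASE_URL and SERVER1_COMFY_OUTPUT_DIR must be set in .env before starting.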

export { processImage, convertImageWithPrompt, processTwoImages, processIteration, main };