upload.mjs

import { execa } from "execa"
import path from "path"
import { fileURLToPath } from "url"
import { S3Client } from "@aws-sdk/client-s3"
import { Upload } from "@aws-sdk/lib-storage"
import { PassThrough, Readable } from "stream"
import { WritableStreamBuffer } from "stream-buffers"

const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)

// Renders PlantUML source to a PNG and streams it straight into Aliyun OSS,
// returning the public URL of the uploaded image.
async function uploadUml(plantUml) {
  const s3 = new S3Client({
    region: "oss-cn-hangzhou",
    endpoint: "https://oss-cn-hangzhou.aliyuncs.com",
    credentials: {
      accessKeyId: "PXzJyah5rZfWHIIH",
      secretAccessKey: "e1MS6j0wypXJrw8CM0hObZu8qKbfah"
    }
  })

  // PlantUML writes the PNG into this stream and the multipart upload reads
  // from the other end, so the image is never written to disk.
  const stream = new PassThrough()
  const key = `uml/${Date.now()}.png`
  const upload = new Upload({
    client: s3,
    params: {
      ACL: "public-read",
      Bucket: "nebuai",
      Key: key,
      Body: stream
    }
  })

  // Run PlantUML in pipe mode: diagram source on stdin, PNG on stdout.
  const p = execa("java", [
    "-jar",
    "-Djava.awt.headless=true",
    "-DPLANTUML_LIMIT_SIZE=8192",
    path.join(__dirname, "plantuml-1.2023.12.jar"),
    "-config",
    path.join(__dirname, "plantuml.cfg"),
    "-tpng",
    "-pipe",
    "-fastfail",
    "-noerror"
  ])

  Readable.from(plantUml).pipe(p.stdin)
  const err = new WritableStreamBuffer()
  p.pipeStdout(stream)
  p.pipeStderr(err)

  // Start the upload before awaiting PlantUML so the PassThrough is drained
  // while the PNG is produced; waiting for the process to exit first can
  // deadlock on backpressure once the image outgrows the stream buffers.
  const done = upload.done()

  try {
    await p
  } catch (error) {
    // Tear down the in-flight upload and surface PlantUML's stderr if any.
    stream.destroy(error)
    await done.catch(() => {})
    await upload.abort().catch(() => {})
    const stderr = err.getContents()
    throw new Error(stderr ? stderr.toString() : error.message)
  }

  await done
  return `https://nebuai.oss-cn-hangzhou.aliyuncs.com/${key}`
}

export { uploadUml }
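
// Usage sketch (not part of the original module): a hypothetical caller could
// pass raw PlantUML text and log the public URL of the rendered PNG. The
// import path and the sample diagram below are illustrative assumptions.
//
//   import { uploadUml } from "./upload.mjs"
//
//   const url = await uploadUml(`@startuml
//   Alice -> Bob: hello
//   @enduml`)
//   console.log(url)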