// testupload.ts — markdown-to-docx conversion and OSS upload test script.
// (Scraped-page header and line-number gutter removed; they were not valid TypeScript.)
import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'
import { Upload } from '@aws-sdk/lib-storage'
import { PassThrough, Readable } from 'stream'
import { WritableStreamBuffer } from 'stream-buffers'
import * as path from 'path'
  6. async function uploadDoc(title, content) {
  7. const s3 = new S3Client({
  8. region: 'oss-cn-hangzhou',
  9. endpoint: `https://oss-cn-hangzhou.aliyuncs.com`,
  10. credentials: {
  11. accessKeyId: 'PXzJyah5rZfWHIIH',
  12. secretAccessKey: 'e1MS6j0wypXJrw8CM0hObZu8qKbfah'
  13. }
  14. })
  15. const stream = new PassThrough()
  16. const key = `doc/${title}_${Date.now()}.docx`
  17. const upload = new Upload({
  18. client: s3,
  19. params: {
  20. ACL: 'public-read',
  21. Bucket: 'nebuai',
  22. Key: key,
  23. Body: stream
  24. }
  25. })
  26. const { execa } = await (eval('import("execa")') as Promise<typeof import('execa')>)
  27. const p = execa('pandoc', ['-f', 'markdown', '-t', 'docx'])
  28. Readable.from(content).pipe(p.stdin)
  29. const err = new WritableStreamBuffer()
  30. p.pipeStdout(stream)
  31. p.pipeStderr(err)
  32. try {
  33. await p
  34. } catch (error) {
  35. throw new Error(err.getContents().toString())
  36. }
  37. await upload.done()
  38. return `https://nebuai.oss-cn-hangzhou.aliyuncs.com/${key}`
  39. }
  40. // uploadDoc('test', '# test').then((res) => {
  41. // console.log(res)
  42. // })
  43. let paper = new WritableStreamBuffer()
  44. paper.write('test')
  45. console.log(paper.getContentsAsString('utf8'))