// asar.mjs
import fs from 'node:fs'
import path from 'node:path'
import { promisify } from 'node:util'
import pickle from 'chromium-pickle-js'
import { UINT64 } from 'cuint'
  5. /**
  6. * Based of express-serve-asar (https://github.com/toyobayashi/express-serve-asar)
  7. * by Fenglin Li (https://github.com/toyobayashi)
  8. */
  9. export default {
  10. fdCache: {},
  11. async serve (pkgName, req, res, next) {
  12. const packages = {
  13. twemoji: path.join(WIKI.ROOTPATH, 'assets/svg/twemoji.asar')
  14. }
  15. const file = this.readFilesystemSync(packages[pkgName])
  16. const { filesystem, fd } = file
  17. const info = filesystem.getFile(req.path.substring(1))
  18. if (info) {
  19. res.set({
  20. 'Content-Type': 'image/svg+xml',
  21. 'Content-Length': info.size
  22. })
  23. fs.createReadStream('', {
  24. fd,
  25. autoClose: false,
  26. start: 8 + filesystem.headerSize + parseInt(info.offset, 10),
  27. end: 8 + filesystem.headerSize + parseInt(info.offset, 10) + info.size - 1
  28. }).on('error', (err) => {
  29. WIKI.logger.warn(err)
  30. res.sendStatus(404)
  31. }).pipe(res.status(200))
  32. } else {
  33. res.sendStatus(404)
  34. }
  35. },
  36. async unload () {
  37. const fds = Object.values(this.fdCache)
  38. if (fds.length > 0) {
  39. WIKI.logger.info('Closing ASAR file descriptors...')
  40. const closeAsync = require('util').promisify(fs.close)
  41. await Promise.all(fds.map(x => closeAsync(x.fd)))
  42. this.fdCache = {}
  43. }
  44. },
  45. readArchiveHeaderSync (fd) {
  46. let size
  47. let headerBuf
  48. const sizeBuf = Buffer.alloc(8)
  49. if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
  50. throw new Error('Unable to read header size')
  51. }
  52. const sizePickle = pickle.createFromBuffer(sizeBuf)
  53. size = sizePickle.createIterator().readUInt32()
  54. headerBuf = Buffer.alloc(size)
  55. if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
  56. throw new Error('Unable to read header')
  57. }
  58. const headerPickle = pickle.createFromBuffer(headerBuf)
  59. const header = headerPickle.createIterator().readString()
  60. return { header: JSON.parse(header), headerSize: size }
  61. },
  62. readFilesystemSync (archive) {
  63. if (!this.fdCache[archive]) {
  64. const fd = fs.openSync(archive, 'r')
  65. const header = this.readArchiveHeaderSync(fd)
  66. const filesystem = new Filesystem(archive)
  67. filesystem.header = header.header
  68. filesystem.headerSize = header.headerSize
  69. this.fdCache[archive] = {
  70. fd,
  71. filesystem
  72. }
  73. }
  74. return this.fdCache[archive]
  75. }
  76. }
  77. class Filesystem {
  78. constructor (src) {
  79. this.src = path.resolve(src)
  80. this.header = { files: {} }
  81. this.offset = UINT64(0)
  82. }
  83. searchNodeFromDirectory (p) {
  84. let json = this.header
  85. const dirs = p.split(path.sep)
  86. for (const dir of dirs) {
  87. if (dir !== '.') {
  88. json = json.files[dir]
  89. }
  90. }
  91. return json
  92. }
  93. getNode (p) {
  94. const node = this.searchNodeFromDirectory(path.dirname(p))
  95. const name = path.basename(p)
  96. if (name) {
  97. return node.files[name]
  98. } else {
  99. return node
  100. }
  101. }
  102. getFile (p, followLinks) {
  103. followLinks = typeof followLinks === 'undefined' ? true : followLinks
  104. const info = this.getNode(p)
  105. if (!info) {
  106. return false
  107. }
  108. if (info.link && followLinks) {
  109. return this.getFile(info.link)
  110. } else {
  111. return info
  112. }
  113. }
  114. }