needle_parse_upload.go 6.3 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253
  1. package needle
  2. import (
  3. "bytes"
  4. "crypto/md5"
  5. "encoding/base64"
  6. "fmt"
  7. "io"
  8. "mime"
  9. "net/http"
  10. "path"
  11. "path/filepath"
  12. "strconv"
  13. "strings"
  14. "github.com/seaweedfs/seaweedfs/weed/glog"
  15. "github.com/seaweedfs/seaweedfs/weed/util"
  16. )
// ParsedUpload holds the result of parsing an HTTP upload request:
// the file bytes (possibly gzip-compressed), metadata extracted from
// headers and form values, and bookkeeping for the shared read buffer.
type ParsedUpload struct {
	FileName    string            // base name of the uploaded file (directory part stripped)
	Data        []byte            // file bytes as they will be stored; may be gzipped (see IsGzipped)
	bytesBuffer *bytes.Buffer     // caller-supplied scratch buffer whose bytes back Data
	MimeType    string            // explicit mime type; left empty when deducible from the file extension
	PairMap     map[string]string // request headers whose names start with PairNamePrefix
	IsGzipped   bool              // true when Data is gzip-compressed
	// IsZstd bool
	OriginalDataSize int    // size of the uncompressed payload in bytes
	ModifiedTime     uint64 // parsed from the "ts" form value — units defined by caller; TODO confirm
	Ttl              *TTL   // parsed from the "ttl" form value
	IsChunkedFile    bool   // parsed from the "cm" form value: payload is a chunk manifest
	UncompressedData []byte // Data after decompression (aliases Data when not gzipped)
	ContentMd5       string // base64 (std encoding) MD5 of UncompressedData
}
  32. func ParseUpload(r *http.Request, sizeLimit int64, bytesBuffer *bytes.Buffer) (pu *ParsedUpload, e error) {
  33. bytesBuffer.Reset()
  34. pu = &ParsedUpload{bytesBuffer: bytesBuffer}
  35. pu.PairMap = make(map[string]string)
  36. for k, v := range r.Header {
  37. if len(v) > 0 && strings.HasPrefix(k, PairNamePrefix) {
  38. pu.PairMap[k] = v[0]
  39. }
  40. }
  41. e = parseUpload(r, sizeLimit, pu)
  42. if e != nil {
  43. return
  44. }
  45. pu.ModifiedTime, _ = strconv.ParseUint(r.FormValue("ts"), 10, 64)
  46. pu.Ttl, _ = ReadTTL(r.FormValue("ttl"))
  47. pu.OriginalDataSize = len(pu.Data)
  48. pu.UncompressedData = pu.Data
  49. // println("received data", len(pu.Data), "isGzipped", pu.IsGzipped, "mime", pu.MimeType, "name", pu.FileName)
  50. if pu.IsGzipped {
  51. if unzipped, e := util.DecompressData(pu.Data); e == nil {
  52. pu.OriginalDataSize = len(unzipped)
  53. pu.UncompressedData = unzipped
  54. // println("ungzipped data size", len(unzipped))
  55. }
  56. } else {
  57. ext := filepath.Base(pu.FileName)
  58. mimeType := pu.MimeType
  59. if mimeType == "" {
  60. mimeType = http.DetectContentType(pu.Data)
  61. }
  62. // println("detected mimetype to", pu.MimeType)
  63. if mimeType == "application/octet-stream" {
  64. mimeType = ""
  65. }
  66. if shouldBeCompressed, iAmSure := util.IsCompressableFileType(ext, mimeType); shouldBeCompressed && iAmSure {
  67. // println("ext", ext, "iAmSure", iAmSure, "shouldBeCompressed", shouldBeCompressed, "mimeType", pu.MimeType)
  68. if compressedData, err := util.GzipData(pu.Data); err == nil {
  69. if len(compressedData)*10 < len(pu.Data)*9 {
  70. pu.Data = compressedData
  71. pu.IsGzipped = true
  72. }
  73. // println("gzipped data size", len(compressedData))
  74. }
  75. }
  76. }
  77. // md5
  78. h := md5.New()
  79. h.Write(pu.UncompressedData)
  80. pu.ContentMd5 = base64.StdEncoding.EncodeToString(h.Sum(nil))
  81. if expectedChecksum := r.Header.Get("Content-MD5"); expectedChecksum != "" {
  82. if expectedChecksum != pu.ContentMd5 {
  83. e = fmt.Errorf("Content-MD5 did not match md5 of file data expected [%s] received [%s] size %d", expectedChecksum, pu.ContentMd5, len(pu.UncompressedData))
  84. return
  85. }
  86. }
  87. return
  88. }
  89. func parseUpload(r *http.Request, sizeLimit int64, pu *ParsedUpload) (e error) {
  90. defer func() {
  91. if e != nil && r.Body != nil {
  92. io.Copy(io.Discard, r.Body)
  93. r.Body.Close()
  94. }
  95. }()
  96. contentType := r.Header.Get("Content-Type")
  97. var dataSize int64
  98. if r.Method == http.MethodPost && (contentType == "" || strings.Contains(contentType, "form-data")) {
  99. form, fe := r.MultipartReader()
  100. if fe != nil {
  101. glog.V(0).Infoln("MultipartReader [ERROR]", fe)
  102. e = fe
  103. return
  104. }
  105. // first multi-part item
  106. part, fe := form.NextPart()
  107. if fe != nil {
  108. glog.V(0).Infoln("Reading Multi part [ERROR]", fe)
  109. e = fe
  110. return
  111. }
  112. pu.FileName = part.FileName()
  113. if pu.FileName != "" {
  114. pu.FileName = path.Base(pu.FileName)
  115. }
  116. dataSize, e = pu.bytesBuffer.ReadFrom(io.LimitReader(part, sizeLimit+1))
  117. if e != nil {
  118. glog.V(0).Infoln("Reading Content [ERROR]", e)
  119. return
  120. }
  121. if dataSize == sizeLimit+1 {
  122. e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
  123. return
  124. }
  125. pu.Data = pu.bytesBuffer.Bytes()
  126. contentType = part.Header.Get("Content-Type")
  127. // if the filename is empty string, do a search on the other multi-part items
  128. for pu.FileName == "" {
  129. part2, fe := form.NextPart()
  130. if fe != nil {
  131. break // no more or on error, just safely break
  132. }
  133. fName := part2.FileName()
  134. // found the first <file type> multi-part has filename
  135. if fName != "" {
  136. pu.bytesBuffer.Reset()
  137. dataSize2, fe2 := pu.bytesBuffer.ReadFrom(io.LimitReader(part2, sizeLimit+1))
  138. if fe2 != nil {
  139. glog.V(0).Infoln("Reading Content [ERROR]", fe2)
  140. e = fe2
  141. return
  142. }
  143. if dataSize2 == sizeLimit+1 {
  144. e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
  145. return
  146. }
  147. // update
  148. pu.Data = pu.bytesBuffer.Bytes()
  149. pu.FileName = path.Base(fName)
  150. contentType = part.Header.Get("Content-Type")
  151. part = part2
  152. break
  153. }
  154. }
  155. pu.IsGzipped = part.Header.Get("Content-Encoding") == "gzip"
  156. // pu.IsZstd = part.Header.Get("Content-Encoding") == "zstd"
  157. } else {
  158. disposition := r.Header.Get("Content-Disposition")
  159. if strings.Contains(disposition, "name=") {
  160. if !strings.HasPrefix(disposition, "inline") && !strings.HasPrefix(disposition, "attachment") {
  161. disposition = "attachment; " + disposition
  162. }
  163. _, mediaTypeParams, err := mime.ParseMediaType(disposition)
  164. if err == nil {
  165. dpFilename, hasFilename := mediaTypeParams["filename"]
  166. dpName, hasName := mediaTypeParams["name"]
  167. if hasFilename {
  168. pu.FileName = dpFilename
  169. } else if hasName {
  170. pu.FileName = dpName
  171. }
  172. }
  173. } else {
  174. pu.FileName = ""
  175. }
  176. if pu.FileName != "" {
  177. pu.FileName = path.Base(pu.FileName)
  178. } else {
  179. pu.FileName = path.Base(r.URL.Path)
  180. }
  181. dataSize, e = pu.bytesBuffer.ReadFrom(io.LimitReader(r.Body, sizeLimit+1))
  182. if e != nil {
  183. glog.V(0).Infoln("Reading Content [ERROR]", e)
  184. return
  185. }
  186. if dataSize == sizeLimit+1 {
  187. e = fmt.Errorf("file over the limited %d bytes", sizeLimit)
  188. return
  189. }
  190. pu.Data = pu.bytesBuffer.Bytes()
  191. pu.MimeType = contentType
  192. pu.IsGzipped = r.Header.Get("Content-Encoding") == "gzip"
  193. // pu.IsZstd = r.Header.Get("Content-Encoding") == "zstd"
  194. }
  195. pu.IsChunkedFile, _ = strconv.ParseBool(r.FormValue("cm"))
  196. if !pu.IsChunkedFile {
  197. dotIndex := strings.LastIndex(pu.FileName, ".")
  198. ext, mtype := "", ""
  199. if dotIndex > 0 {
  200. ext = strings.ToLower(pu.FileName[dotIndex:])
  201. mtype = mime.TypeByExtension(ext)
  202. }
  203. if contentType != "" && contentType != "application/octet-stream" && mtype != contentType {
  204. pu.MimeType = contentType // only return mime type if not deducible
  205. } else if mtype != "" && pu.MimeType == "" && mtype != "application/octet-stream" {
  206. pu.MimeType = mtype
  207. }
  208. }
  209. return
  210. }