download.go 3.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131
  1. package command
  2. import (
  3. "fmt"
  4. "io"
  5. "io/ioutil"
  6. "os"
  7. "path"
  8. "strings"
  9. "github.com/chrislusf/seaweedfs/weed/operation"
  10. "github.com/chrislusf/seaweedfs/weed/util"
  11. )
  12. var (
  13. d DownloadOptions
  14. )
  15. type DownloadOptions struct {
  16. server *string
  17. dir *string
  18. }
// init wires the download command to its run function and registers its
// command-line flags with the command's flag set.
func init() {
	cmdDownload.Run = runDownload // break init cycle
	d.server = cmdDownload.Flag.String("server", "localhost:9333", "SeaweedFS master location")
	d.dir = cmdDownload.Flag.String("dir", ".", "Download the whole folder recursively if specified.")
}
// cmdDownload describes the "weed download" subcommand: fetch one or more
// files by file id, reassembling chunked uploads (see runDownload).
var cmdDownload = &Command{
	UsageLine: "download -server=localhost:9333 -dir=one_directory fid1 [fid2 fid3 ...]",
	Short:     "download files by file id",
	Long: `download files by file id.

  Usually you just need to use curl to lookup the file's volume server, and then download them directly.
  This download tool combine the two steps into one.

  What's more, if you use "weed upload -maxMB=..." option to upload a big file divided into chunks, you can
  use this tool to download the chunks and merge them automatically.

  `,
}
  34. func runDownload(cmd *Command, args []string) bool {
  35. for _, fid := range args {
  36. if e := downloadToFile(*d.server, fid, util.ResolvePath(*d.dir)); e != nil {
  37. fmt.Println("Download Error: ", fid, e)
  38. }
  39. }
  40. return true
  41. }
  42. func downloadToFile(server, fileId, saveDir string) error {
  43. fileUrl, lookupError := operation.LookupFileId(server, fileId)
  44. if lookupError != nil {
  45. return lookupError
  46. }
  47. filename, _, rc, err := util.DownloadFile(fileUrl)
  48. if err != nil {
  49. return err
  50. }
  51. defer rc.Close()
  52. if filename == "" {
  53. filename = fileId
  54. }
  55. isFileList := false
  56. if strings.HasSuffix(filename, "-list") {
  57. // old command compatible
  58. isFileList = true
  59. filename = filename[0 : len(filename)-len("-list")]
  60. }
  61. f, err := os.OpenFile(path.Join(saveDir, filename), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
  62. if err != nil {
  63. io.Copy(ioutil.Discard, rc)
  64. return err
  65. }
  66. defer f.Close()
  67. if isFileList {
  68. content, err := ioutil.ReadAll(rc)
  69. if err != nil {
  70. return err
  71. }
  72. fids := strings.Split(string(content), "\n")
  73. for _, partId := range fids {
  74. var n int
  75. _, part, err := fetchContent(*d.server, partId)
  76. if err == nil {
  77. n, err = f.Write(part)
  78. }
  79. if err == nil && n < len(part) {
  80. err = io.ErrShortWrite
  81. }
  82. if err != nil {
  83. return err
  84. }
  85. }
  86. } else {
  87. if _, err = io.Copy(f, rc); err != nil {
  88. return err
  89. }
  90. }
  91. return nil
  92. }
  93. func fetchContent(server string, fileId string) (filename string, content []byte, e error) {
  94. fileUrl, lookupError := operation.LookupFileId(server, fileId)
  95. if lookupError != nil {
  96. return "", nil, lookupError
  97. }
  98. var rc io.ReadCloser
  99. if filename, _, rc, e = util.DownloadFile(fileUrl); e != nil {
  100. return "", nil, e
  101. }
  102. content, e = ioutil.ReadAll(rc)
  103. rc.Close()
  104. return
  105. }
  106. func WriteFile(filename string, data []byte, perm os.FileMode) error {
  107. f, err := os.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm)
  108. if err != nil {
  109. return err
  110. }
  111. n, err := f.Write(data)
  112. f.Close()
  113. if err == nil && n < len(data) {
  114. err = io.ErrShortWrite
  115. }
  116. return err
  117. }