This repository was archived by the owner on Dec 19, 2017. It is now read-only.

PutPart returning wrong size #256

@frodopwns

Description

I can successfully use PutAll for a multipart upload, and a single-part upload works with no trouble.

When I create a []byte of the size I want my parts to be (say 5000000) and try to upload a 25 MB file in 5 MB parts, each part reports that it is 25 MB instead of 5 MB.

When uploading a 15 MB file I expect to see:

Uploading...
Processing 1 part of 3 and uploaded 5242880 bytes.
 Processing 2 part of 3 and uploaded 5242880 bytes.
 Processing 3 part of 3 and uploaded 5242880 bytes.

Instead I see:

Uploading...
Processing 1 part of 3 and uploaded 15728640 bytes.
 Processing 2 part of 3 and uploaded 15728640 bytes.
 Processing 3 part of 3 and uploaded 15728640 bytes.

Is this due to an issue with the file.Read(partBuffer) call? Any help would be much appreciated.

I am using Go 1.5.1 on a Mac.

package main

import (
    "bufio"
    "fmt"
    "math"
    "net/http"
    "os"

    "github.com/mitchellh/goamz/aws"
    "github.com/mitchellh/goamz/s3"
)

func check(err error) {
    if err != nil {
        panic(err)
    }
}

func main() {
    fmt.Println("Test")

    auth, err := aws.GetAuth("XXXXX", "XXXXXXXXXX")
    check(err)

    client := s3.New(auth, aws.USWest2)

    b := s3.Bucket{
        S3:   client,
        Name: "some-bucket",
    }

    fileToBeUploaded := "testfile"
    file, err := os.Open(fileToBeUploaded)
    check(err)
    defer file.Close()

    fileInfo, _ := file.Stat()
    fileSize := fileInfo.Size()
    bytes := make([]byte, fileSize)

    // read into buffer
    buffer := bufio.NewReader(file)
    _, err = buffer.Read(bytes)
    check(err)
    filetype := http.DetectContentType(bytes)

    // set up for multipart upload
    multi, err := b.InitMulti("/"+fileToBeUploaded, filetype, s3.ACL("bucket-owner-read"))
    check(err)

    const fileChunk = 5242880 // 5MB
    totalPartsNum := uint64(math.Ceil(float64(fileSize) / float64(fileChunk)))
    parts := []s3.Part{}

    fmt.Println("Uploading...")
    for i := uint64(1); i < totalPartsNum; i++ {

        partSize := int(math.Min(fileChunk, float64(fileSize-int64(i*fileChunk))))
        partBuffer := make([]byte, partSize)

        _, err := file.Read(partBuffer)
        check(err)

        part, err := multi.PutPart(int(i), file) // write to S3 bucket part by part
        check(err)

        fmt.Printf("Processing %d part of %d and uploaded %d bytes.\n ", int(i), int(totalPartsNum), int(part.Size))
        parts = append(parts, part)
    }

    err = multi.Complete(parts)
    check(err)

    fmt.Println("\n\nPutPart upload completed")

}

Am I doing something wrong or is this a bug of some sort?
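
For what it's worth, my current suspicion is that PutPart reads the io.ReadSeeker it is given all the way to the end, so passing file makes every part upload (and report) the whole remaining file rather than one chunk. A minimal sketch of the change I have in mind, wrapping each chunk in its own reader instead of passing file directly (this assumes PutPart accepts any io.ReadSeeker, and "bytes" would need to be added to the imports):

    for i := uint64(1); i < totalPartsNum; i++ {

        partSize := int(math.Min(fileChunk, float64(fileSize-int64(i*fileChunk))))
        partBuffer := make([]byte, partSize)

        _, err := file.Read(partBuffer)
        check(err)

        // Sketch: hand PutPart a reader over just this chunk's bytes,
        // not the underlying *os.File.
        part, err := multi.PutPart(int(i), bytes.NewReader(partBuffer))
        check(err)

        fmt.Printf("Processing %d part of %d and uploaded %d bytes.\n ", int(i), int(totalPartsNum), int(part.Size))
        parts = append(parts, part)
    }

If that guess is right, bytes.NewReader confines each PutPart call to exactly partSize bytes, which should make part.Size come back as 5242880 per part.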
