Golang io.Copy twice on the request body
I would suggest using an io.TeeReader if you want every read from the blob to also pass through the SHA-1 hash as it happens:
bodyReader := io.TeeReader(body, hash)
Now, as bodyReader is consumed during the upload, the hash is updated automatically.
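For orientation, here is a minimal end-to-end sketch of that pattern; the example body, the SHA-1 hash, and the copy to os.Stdout (standing in for the real upload) are illustrative assumptions, not part of the original code.

package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
	"os"
	"strings"
)

func main() {
	body := strings.NewReader("example request body") // stands in for the real blob
	hash := sha1.New()

	// every Read from bodyReader also writes the same bytes into hash
	bodyReader := io.TeeReader(body, hash)

	// "upload" the body; a real client would copy into the request writer instead
	if _, err := io.Copy(os.Stdout, bodyReader); err != nil {
		panic(err)
	}

	fmt.Println("\nsha1:", hex.EncodeToString(hash.Sum(nil)))
}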
You can't do that directly, but you can write a wrapper that does the hashing as the data passes through io.Copy:
// This works as either a reader or a writer,
// but if you use both at the same time the hash will be wrong.
type Hasher struct {
	io.Writer
	io.Reader
	hash.Hash
	Size uint64
}

func (h *Hasher) Write(p []byte) (n int, err error) {
	n, err = h.Writer.Write(p)
	h.Hash.Write(p[:n]) // hash only the bytes actually written
	h.Size += uint64(n)
	return
}

func (h *Hasher) Read(p []byte) (n int, err error) {
	n, err = h.Reader.Read(p)
	h.Hash.Write(p[:n]) // hash only the bytes actually read; correct even when err != nil
	return
}

func (h *Hasher) Sum() string {
	return hex.EncodeToString(h.Hash.Sum(nil))
}
func (h *UploadHandle) Read() (io.Reader, string, int64, error) {
var b bytes.Buffer
hashedReader := &Hasher{Reader: h.Contents, Hash: sha1.New()}
n, err := io.Copy(&b, hashedReader)
if err != nil {
return nil, "", 0, err
}
return &b, hashedReader.Sum(), n, nil
}
// Updated version, based on @Dustin's comment, since I completely forgot io.TeeReader existed.
func (h *UploadHandle) Read() (io.Reader, string, int64, error) {
var b bytes.Buffer
hash := sha1.New()
n, err := io.Copy(&b, io.TeeReader(h.Contents, hash))
if err != nil {
return nil, "", 0, err
}
return &b, hex.EncodeToString(hash.Sum(nil)), n, nil
}
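A hedged usage sketch follows; UploadHandle is assumed here to be a thin struct with a Contents io.Reader field (matching the h.Contents usage above; its real definition comes from the question), only one of the two Read implementations above is kept, and the usual fmt, io, log, os, and strings imports are assumed.

// Assumed shape of the handle, based on the h.Contents field used above.
type UploadHandle struct {
	Contents io.Reader
}

func main() {
	h := &UploadHandle{Contents: strings.NewReader("example request body")}
	body, sum, size, err := h.Read()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("sha1=%s size=%d\n", sum, size)
	io.Copy(os.Stdout, body) // the buffered body is still fully readable for the upload
}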
You have two options.
The most direct way is to use io.MultiWriter.
But if you need the hash value in order to produce the multipart output, then you will have to copy into a bytes.Buffer first and then write the buffer back out to each writer.
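As a rough illustration of the io.MultiWriter route, here is a sketch that builds a multipart body and the SHA-1 in a single pass; the field name, file name, and in-memory buffer destination are illustrative assumptions, not taken from the question.

package main

import (
	"bytes"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
	"mime/multipart"
	"strings"
)

func main() {
	body := strings.NewReader("example request body")

	var buf bytes.Buffer
	mw := multipart.NewWriter(&buf)
	part, err := mw.CreateFormFile("file", "blob") // illustrative field/file names
	if err != nil {
		panic(err)
	}

	hash := sha1.New()
	// a single read of body feeds both the multipart part and the hash
	if _, err := io.Copy(io.MultiWriter(part, hash), body); err != nil {
		panic(err)
	}
	mw.Close()

	fmt.Println("sha1:", hex.EncodeToString(hash.Sum(nil)))
	fmt.Println("multipart bytes:", buf.Len())
}

If the hash value must be known before the multipart body can be written (for example, to place it in an earlier field), this single-pass approach does not work, which is where the bytes.Buffer round trip comes in.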