Merge pull request #16290 from hashicorp/jbardin/s3-errors

retry on s3 state upload errors
This commit is contained in:
James Bardin 2017-10-17 16:04:14 -04:00 committed by GitHub
commit 31912956ce
1 changed file with 36 additions and 22 deletions

View File

@@ -149,6 +149,9 @@ func (c *RemoteClient) Put(data []byte) error {
 	contentType := "application/json"
 	contentLength := int64(len(data))
 
+	// we immediately retry on an internal error, as those are usually transient
+	maxRetries := 2
+	for retryCount := 0; ; retryCount++ {
 	i := &s3.PutObjectInput{
 		ContentType:   &contentType,
 		ContentLength: &contentLength,
@@ -174,7 +177,18 @@ func (c *RemoteClient) Put(data []byte) error {
 	_, err := c.s3Client.PutObject(i)
 	if err != nil {
-		return fmt.Errorf("Failed to upload state: %v", err)
+		if awserr, ok := err.(awserr.Error); ok {
+			if awserr.Code() == s3ErrCodeInternalError {
+				if retryCount > maxRetries {
+					return fmt.Errorf("failed to upload state: %s", err)
+				}
+				log.Println("[WARN] s3 internal error, retrying...")
+				continue
+			}
+		}
+		return fmt.Errorf("failed to upload state: %s", err)
+	}
+	break
 	}
 	sum := md5.Sum(data)