Merge pull request #16243 from hashicorp/jbardin/s3-errors

retry on s3 backend internal errors
This commit is contained in:
James Bardin 2017-10-04 09:26:26 -04:00 committed by GitHub
commit b20ab9f0bb
1 changed file with 27 additions and 12 deletions

View File

@ -22,7 +22,10 @@ import (
)
// Store the last saved serial in dynamo with this suffix for consistency checks.
const stateIDSuffix = "-md5"
const (
stateIDSuffix = "-md5"
s3ErrCodeInternalError = "InternalError"
)
type RemoteClient struct {
s3Client *s3.S3
@ -92,21 +95,33 @@ func (c *RemoteClient) Get() (payload *remote.Payload, err error) {
}
func (c *RemoteClient) get() (*remote.Payload, error) {
output, err := c.s3Client.GetObject(&s3.GetObjectInput{
var output *s3.GetObjectOutput
var err error
// we immediately retry on an internal error, as those are usually transient
maxRetries := 2
for retryCount := 0; ; retryCount++ {
output, err = c.s3Client.GetObject(&s3.GetObjectInput{
Bucket: &c.bucketName,
Key: &c.path,
})
if err != nil {
if awserr := err.(awserr.Error); awserr != nil {
if awserr.Code() == "NoSuchKey" {
if awserr, ok := err.(awserr.Error); ok {
switch awserr.Code() {
case s3.ErrCodeNoSuchKey:
return nil, nil
} else {
case s3ErrCodeInternalError:
if retryCount > maxRetries {
return nil, err
}
} else {
log.Println("[WARN] s3 internal error, retrying...")
continue
}
}
return nil, err
}
break
}
defer output.Body.Close()