adding (backwards compatible) support for providing multiple source filenames and content (#11271)
parent 87d98b1ff1
commit 03b6dfd7e3
@@ -9,6 +9,7 @@ type Archiver interface {
	ArchiveContent(content []byte, infilename string) error
	ArchiveFile(infilename string) error
	ArchiveDir(indirname string) error
	ArchiveMultiple(content map[string][]byte) error
}

type ArchiverBuilder func(filepath string) Archiver

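For orientation, here is a minimal usage sketch of the new interface method, written as if it lived in the same archive package (this commit does not show the package's import path). The constructor NewZipArchiver and the ArchiveMultiple method appear in the tests later in this commit; the file names and contents below are illustrative only.

// Sketch only: builds one zip from an in-memory filename -> content map
// using the new ArchiveMultiple method.
func exampleArchiveMultiple() error {
	content := map[string][]byte{
		"first.txt":  []byte("first file"),
		"second.txt": []byte("second file"),
	}
	archiver := NewZipArchiver("example.zip")
	return archiver.ArchiveMultiple(content)
}
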
@@ -1,6 +1,7 @@
package archive

import (
	"bytes"
	"crypto/md5"
	"crypto/sha1"
	"crypto/sha256"

@@ -11,6 +12,7 @@ import (
	"os"
	"path"

	"github.com/hashicorp/terraform/helper/hashcode"
	"github.com/hashicorp/terraform/helper/schema"
)

@@ -24,6 +26,33 @@ func dataSourceFile() *schema.Resource {
				Required: true,
				ForceNew: true,
			},
			"source": &schema.Schema{
				Type:     schema.TypeSet,
				Optional: true,
				Computed: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"content": &schema.Schema{
							Type:     schema.TypeString,
							Required: true,
							ForceNew: true,
						},
						"filename": &schema.Schema{
							Type:     schema.TypeString,
							Required: true,
							ForceNew: true,
						},
					},
				},
				ConflictsWith: []string{"source_file", "source_dir", "source_content", "source_content_filename"},
				Set: func(v interface{}) int {
					var buf bytes.Buffer
					m := v.(map[string]interface{})
					buf.WriteString(fmt.Sprintf("%s-", m["filename"].(string)))
					buf.WriteString(fmt.Sprintf("%s-", m["content"].(string)))
					return hashcode.String(buf.String())
				},
			},
			"source_content": &schema.Schema{
				Type:     schema.TypeString,
				Optional: true,

@@ -138,6 +167,16 @@ func archive(d *schema.ResourceData) error {
		if err := archiver.ArchiveContent([]byte(content), filename.(string)); err != nil {
			return fmt.Errorf("error archiving content: %s", err)
		}
	} else if v, ok := d.GetOk("source"); ok {
		vL := v.(*schema.Set).List()
		content := make(map[string][]byte)
		for _, v := range vL {
			src := v.(map[string]interface{})
			content[src["filename"].(string)] = []byte(src["content"].(string))
		}
		if err := archiver.ArchiveMultiple(content); err != nil {
			return fmt.Errorf("error archiving content: %s", err)
		}
	} else {
		return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified")
	}

@@ -51,6 +51,13 @@ func TestAccArchiveFile_Basic(t *testing.T) {
				r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
			),
		},
		r.TestStep{
			Config: testAccArchiveFileMultiConfig,
			Check: r.ComposeTestCheckFunc(
				testAccArchiveFileExists("zip_file_acc_test.zip", &fileSize),
				r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize),
			),
		},
		r.TestStep{
			Config: testAccArchiveFileOutputPath,
			Check: r.ComposeTestCheckFunc(

@@ -107,3 +114,14 @@ data "archive_file" "foo" {
  output_path = "zip_file_acc_test.zip"
}
`

var testAccArchiveFileMultiConfig = `
data "archive_file" "foo" {
  type = "zip"
  source {
    filename = "content.txt"
    content = "This is some content"
  }
  output_path = "zip_file_acc_test.zip"
}
`

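The acceptance config above exercises a single source block; because the new "source" attribute is a schema.TypeSet, repeating the block should produce one archive containing every entry. A hypothetical variant in the same style is sketched below; the variable name and file contents are invented for illustration and are not part of this commit.

var testAccArchiveFileMultiSourceConfig = `
data "archive_file" "foo" {
  type = "zip"
  source {
    filename = "content_1.txt"
    content = "This is the first file"
  }
  source {
    filename = "content_2.txt"
    content = "This is the second file"
  }
  output_path = "zip_file_acc_test.zip"
}
`
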
@@ -6,6 +6,7 @@ import (
	"io/ioutil"
	"os"
	"path/filepath"
	"sort"
)

type ZipArchiver struct {

@@ -85,6 +86,34 @@ func (a *ZipArchiver) ArchiveDir(indirname string) error {

}

func (a *ZipArchiver) ArchiveMultiple(content map[string][]byte) error {
	if err := a.open(); err != nil {
		return err
	}
	defer a.close()

	// Ensure files are processed in the same order so hashes don't change
	keys := make([]string, len(content))
	i := 0
	for k := range content {
		keys[i] = k
		i++
	}
	sort.Strings(keys)

	for _, filename := range keys {
		f, err := a.writer.Create(filename)
		if err != nil {
			return err
		}
		_, err = f.Write(content[filename])
		if err != nil {
			return err
		}
	}
	return nil
}

func (a *ZipArchiver) open() error {
	f, err := os.Create(a.filepath)
	if err != nil {

@@ -44,6 +44,23 @@ func TestZipArchiver_Dir(t *testing.T) {
	})
}

func TestZipArchiver_Multiple(t *testing.T) {
	zipfilepath := "archive-content.zip"
	content := map[string][]byte{
		"file1.txt": []byte("This is file 1"),
		"file2.txt": []byte("This is file 2"),
		"file3.txt": []byte("This is file 3"),
	}

	archiver := NewZipArchiver(zipfilepath)
	if err := archiver.ArchiveMultiple(content); err != nil {
		t.Fatalf("unexpected error: %s", err)
	}

	ensureContents(t, zipfilepath, content)

}

func ensureContents(t *testing.T, zipfilepath string, wants map[string][]byte) {
	r, err := zip.OpenReader(zipfilepath)
	if err != nil {