package archive

import (
	"crypto/sha1"
	"crypto/sha256"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"io/ioutil"
	"os"
	"path"

	"github.com/hashicorp/terraform/helper/schema"
)
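
// dataSourceFile returns the schema.Resource for the archive file data source.
// It archives inline content, a single file, or a whole directory into the
// configured output_path and exposes the resulting archive's size and checksums.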
func dataSourceFile() *schema.Resource {
	return &schema.Resource{
		Read: dataSourceFileRead,

		Schema: map[string]*schema.Schema{
			"type": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},
			"source_content": &schema.Schema{
				Type:          schema.TypeString,
				Optional:      true,
				ForceNew:      true,
				ConflictsWith: []string{"source_file", "source_dir"},
			},
			"source_content_filename": &schema.Schema{
				Type:          schema.TypeString,
				Optional:      true,
				ForceNew:      true,
				ConflictsWith: []string{"source_file", "source_dir"},
			},
			"source_file": &schema.Schema{
				Type:          schema.TypeString,
				Optional:      true,
				ForceNew:      true,
				ConflictsWith: []string{"source_content", "source_content_filename", "source_dir"},
			},
			"source_dir": &schema.Schema{
				Type:          schema.TypeString,
				Optional:      true,
				ForceNew:      true,
				ConflictsWith: []string{"source_content", "source_content_filename", "source_file"},
			},
			"output_path": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
			},
			"output_size": &schema.Schema{
				Type:     schema.TypeInt,
				Computed: true,
				ForceNew: true,
			},
			"output_sha": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				ForceNew:    true,
				Description: "SHA1 checksum of output file",
			},
			"output_base64sha256": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				ForceNew:    true,
				Description: "Base64 Encoded SHA256 checksum of output file",
			},
		},
	}
}
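
// dataSourceFileRead ensures the output directory exists, builds the archive,
// and records its size, SHA1 checksum, and base64-encoded SHA256 checksum.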
func dataSourceFileRead(d *schema.ResourceData, meta interface{}) error {
	outputPath := d.Get("output_path").(string)
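
	// Make sure the directory that will hold the archive exists before writing to it.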
	outputDirectory := path.Dir(outputPath)
	if outputDirectory != "" {
		if _, err := os.Stat(outputDirectory); err != nil {
			if err := os.MkdirAll(outputDirectory, 0755); err != nil {
				return err
			}
		}
	}
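
	// Build the archive from the configured source.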
	if err := archive(d); err != nil {
		return err
	}

	// Generate archived file stats
	fi, err := os.Stat(outputPath)
	if err != nil {
		return err
	}
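
	// Checksum the archive; the SHA1 also becomes the data source ID below.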
	sha1, base64sha256, err := genFileShas(outputPath)
	if err != nil {
		return fmt.Errorf("could not generate file checksums: %s", err)
	}
	d.Set("output_sha", sha1)
	d.Set("output_base64sha256", base64sha256)

	d.Set("output_size", fi.Size())
	d.SetId(d.Get("output_sha").(string))

	return nil
}
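
// archive looks up an archiver for the requested type and feeds it the
// configured source: a directory, a single file, or inline content.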
func archive(d *schema.ResourceData) error {
	archiveType := d.Get("type").(string)
	outputPath := d.Get("output_path").(string)

	archiver := getArchiver(archiveType, outputPath)
	if archiver == nil {
		return fmt.Errorf("archive type not supported: %s", archiveType)
	}
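
	// Archive whichever source is configured: a directory, a single file, or
	// inline content with an explicit file name.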
	if dir, ok := d.GetOk("source_dir"); ok {
		if err := archiver.ArchiveDir(dir.(string)); err != nil {
			return fmt.Errorf("error archiving directory: %s", err)
		}
	} else if file, ok := d.GetOk("source_file"); ok {
		if err := archiver.ArchiveFile(file.(string)); err != nil {
			return fmt.Errorf("error archiving file: %s", err)
		}
	} else if filename, ok := d.GetOk("source_content_filename"); ok {
		content := d.Get("source_content").(string)
		if err := archiver.ArchiveContent([]byte(content), filename.(string)); err != nil {
			return fmt.Errorf("error archiving content: %s", err)
		}
	} else {
		return fmt.Errorf("one of 'source_dir', 'source_file', 'source_content_filename' must be specified")
	}
	return nil
}
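
// genFileShas returns the hex-encoded SHA1 checksum and the base64-encoded
// SHA256 checksum of the named file.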
func genFileShas(filename string) (string, string, error) {
	data, err := ioutil.ReadFile(filename)
	if err != nil {
		return "", "", fmt.Errorf("could not compute file '%s' checksum: %s", filename, err)
	}
	h := sha1.New()
	h.Write(data)
	sha1 := hex.EncodeToString(h.Sum(nil))

	h256 := sha256.New()
	h256.Write(data)
	shaSum := h256.Sum(nil)
	sha256base64 := base64.StdEncoding.EncodeToString(shaSum)

	return sha1, sha256base64, nil
}