Diffstat (limited to 'vendor/github.com/hashicorp/vault/helper')
-rw-r--r--  vendor/github.com/hashicorp/vault/helper/compressutil/compress.go  159
-rw-r--r--  vendor/github.com/hashicorp/vault/helper/jsonutil/json.go  99
2 files changed, 258 insertions, 0 deletions
diff --git a/vendor/github.com/hashicorp/vault/helper/compressutil/compress.go b/vendor/github.com/hashicorp/vault/helper/compressutil/compress.go
new file mode 100644
index 0000000..e485f2f
--- /dev/null
+++ b/vendor/github.com/hashicorp/vault/helper/compressutil/compress.go
@@ -0,0 +1,159 @@
+package compressutil
+
+import (
+ "bytes"
+ "compress/gzip"
+ "compress/lzw"
+ "fmt"
+ "io"
+)
+
+const (
+ // A byte value used as a canary prefix for the compressed information,
+ // which is used to distinguish whether a JSON input is compressed.
+ // The value of this constant should not be the first character of any
+ // valid JSON string.
+
+ // Byte value used as canary when using Gzip format
+ CompressionCanaryGzip byte = 'G'
+
+ // Byte value used as canary when using Lzw format
+ CompressionCanaryLzw byte = 'L'
+
+ CompressionTypeLzw = "lzw"
+
+ CompressionTypeGzip = "gzip"
+)
+
+// CompressionConfig is used to select the compression type to be used by the
+// Compress and Decompress utilities.
+// Supported types are:
+// * CompressionTypeLzw
+// * CompressionTypeGzip
+//
+// When using CompressionTypeGzip, the compression levels can also be chosen:
+// * gzip.DefaultCompression
+// * gzip.BestSpeed
+// * gzip.BestCompression
+type CompressionConfig struct {
+ // Type of the compression algorithm to be used
+ Type string
+
+ // When using Gzip format, the compression level to employ
+ GzipCompressionLevel int
+}
+
+// Compress places the canary byte in a buffer and uses the same buffer to fill
+// in the compressed information of the given input. The configuration supports
+// two types of compression: LZW and Gzip. When using the Gzip compression
+// format, if GzipCompressionLevel is not specified, 'gzip.DefaultCompression'
+// is assumed.
+func Compress(data []byte, config *CompressionConfig) ([]byte, error) {
+ var buf bytes.Buffer
+ var writer io.WriteCloser
+ var err error
+
+ if config == nil {
+ return nil, fmt.Errorf("config is nil")
+ }
+
+ // Write the canary into the buffer and create writer to compress the
+ // input data based on the configured type
+ switch config.Type {
+ case CompressionTypeLzw:
+ buf.Write([]byte{CompressionCanaryLzw})
+
+ writer = lzw.NewWriter(&buf, lzw.LSB, 8)
+ case CompressionTypeGzip:
+ buf.Write([]byte{CompressionCanaryGzip})
+
+ switch {
+ case config.GzipCompressionLevel == gzip.BestCompression,
+ config.GzipCompressionLevel == gzip.BestSpeed,
+ config.GzipCompressionLevel == gzip.DefaultCompression:
+ // These are valid compression levels
+ default:
+ // If the compression level is set to NoCompression or to
+ // any invalid value, fall back to DefaultCompression
+ config.GzipCompressionLevel = gzip.DefaultCompression
+ }
+ writer, err = gzip.NewWriterLevel(&buf, config.GzipCompressionLevel)
+ default:
+ return nil, fmt.Errorf("unsupported compression type")
+ }
+ if err != nil {
+ return nil, fmt.Errorf("failed to create a compression writer; err: %v", err)
+ }
+
+ if writer == nil {
+ return nil, fmt.Errorf("failed to create a compression writer")
+ }
+
+ // Compress the input and place it in the same buffer containing the
+ // canary byte.
+ if _, err = writer.Write(data); err != nil {
+ return nil, fmt.Errorf("failed to compress input data; err: %v", err)
+ }
+
+ // Close the io.WriteCloser
+ if err = writer.Close(); err != nil {
+ return nil, err
+ }
+
+ // Return the compressed bytes with canary byte at the start
+ return buf.Bytes(), nil
+}
+
+// Decompress checks if the first byte in the input matches the canary byte.
+// If the first byte is a canary byte, then the input past the canary byte
+// will be decompressed using the compression method indicated by that canary.
+// If the first byte isn't a canary byte, then the utility returns a boolean
+// value indicating that the input was not compressed.
+func Decompress(data []byte) ([]byte, bool, error) {
+ var err error
+ var reader io.ReadCloser
+ if len(data) == 0 {
+ return nil, false, fmt.Errorf("'data' being decompressed is empty")
+ }
+
+ switch {
+ case data[0] == CompressionCanaryGzip:
+ // If the first byte matches the canary byte, remove the canary
+ // byte and try to decompress the data that is after the canary.
+ if len(data) < 2 {
+ return nil, false, fmt.Errorf("invalid 'data' after the canary")
+ }
+ data = data[1:]
+ reader, err = gzip.NewReader(bytes.NewReader(data))
+ case data[0] == CompressionCanaryLzw:
+ // If the first byte matches the canary byte, remove the canary
+ // byte and try to decompress the data that is after the canary.
+ if len(data) < 2 {
+ return nil, false, fmt.Errorf("invalid 'data' after the canary")
+ }
+ data = data[1:]
+ reader = lzw.NewReader(bytes.NewReader(data), lzw.LSB, 8)
+ default:
+ // If the first byte doesn't match any canary byte, the content
+ // was not compressed at all. Indicate to the caller that the
+ // input was not compressed.
+ return nil, true, nil
+ }
+ if err != nil {
+ return nil, false, fmt.Errorf("failed to create a compression reader; err: %v", err)
+ }
+ if reader == nil {
+ return nil, false, fmt.Errorf("failed to create a compression reader")
+ }
+
+ // Close the io.ReadCloser
+ defer reader.Close()
+
+ // Read all the compressed data into a buffer
+ var buf bytes.Buffer
+ if _, err = io.Copy(&buf, reader); err != nil {
+ return nil, false, err
+ }
+
+ return buf.Bytes(), false, nil
+}
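
A minimal usage sketch of the compressutil API added above (illustrative only, not part of the vendored files; it assumes the vendored import path resolves from the consuming project):

package main

import (
    "compress/gzip"
    "fmt"

    "github.com/hashicorp/vault/helper/compressutil"
)

func main() {
    input := []byte(`{"key":"value"}`)

    // Compress prepends the Gzip canary byte and compresses the input.
    // An unset or invalid GzipCompressionLevel falls back to
    // gzip.DefaultCompression inside Compress.
    compressed, err := compressutil.Compress(input, &compressutil.CompressionConfig{
        Type:                 compressutil.CompressionTypeGzip,
        GzipCompressionLevel: gzip.BestCompression,
    })
    if err != nil {
        panic(err)
    }

    // Decompress inspects the first byte; notCompressed is true when no
    // canary is present, in which case the original input should be used as-is.
    plain, notCompressed, err := compressutil.Decompress(compressed)
    if err != nil {
        panic(err)
    }
    fmt.Println(notCompressed, string(plain))
}
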
diff --git a/vendor/github.com/hashicorp/vault/helper/jsonutil/json.go b/vendor/github.com/hashicorp/vault/helper/jsonutil/json.go
new file mode 100644
index 0000000..a96745b
--- /dev/null
+++ b/vendor/github.com/hashicorp/vault/helper/jsonutil/json.go
@@ -0,0 +1,99 @@
+package jsonutil
+
+import (
+ "bytes"
+ "compress/gzip"
+ "encoding/json"
+ "fmt"
+ "io"
+
+ "github.com/hashicorp/vault/helper/compressutil"
+)
+
+// EncodeJSON encodes/marshals the given object into JSON
+func EncodeJSON(in interface{}) ([]byte, error) {
+ if in == nil {
+ return nil, fmt.Errorf("input for encoding is nil")
+ }
+ var buf bytes.Buffer
+ enc := json.NewEncoder(&buf)
+ if err := enc.Encode(in); err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// EncodeJSONAndCompress encodes the given input into JSON and compresses the
+// encoded value (by default, using the Gzip format at BestCompression level). A
+// canary byte is placed at the beginning of the returned bytes so that the
+// decompression logic can identify compressed input.
+func EncodeJSONAndCompress(in interface{}, config *compressutil.CompressionConfig) ([]byte, error) {
+ if in == nil {
+ return nil, fmt.Errorf("input for encoding is nil")
+ }
+
+ // First JSON encode the given input
+ encodedBytes, err := EncodeJSON(in)
+ if err != nil {
+ return nil, err
+ }
+
+ if config == nil {
+ config = &compressutil.CompressionConfig{
+ Type: compressutil.CompressionTypeGzip,
+ GzipCompressionLevel: gzip.BestCompression,
+ }
+ }
+
+ return compressutil.Compress(encodedBytes, config)
+}
+
+// DecodeJSON tries to decompress the given data. The decompression step
+// reports whether the content was compressed in the first place, which is
+// identified by a canary byte preceding the compressed data. If the data is
+// not compressed, it is JSON decoded directly. Otherwise the decompressed
+// data is JSON decoded.
+func DecodeJSON(data []byte, out interface{}) error {
+ if len(data) == 0 {
+ return fmt.Errorf("'data' being decoded is nil")
+ }
+ if out == nil {
+ return fmt.Errorf("output parameter 'out' is nil")
+ }
+
+ // Decompress the data if it was compressed in the first place
+ decompressedBytes, uncompressed, err := compressutil.Decompress(data)
+ if err != nil {
+ return fmt.Errorf("failed to decompress JSON: err: %v", err)
+ }
+ if !uncompressed && len(decompressedBytes) == 0 {
+ return fmt.Errorf("decompressed data being decoded is invalid")
+ }
+
+ // If the input did not contain the compression canary, the compression
+ // utility reports this via the boolean flag. Otherwise decode the
+ // decompressed input.
+ if !uncompressed {
+ data = decompressedBytes
+ }
+
+ return DecodeJSONFromReader(bytes.NewReader(data), out)
+}
+
+// DecodeJSONFromReader decodes/unmarshals JSON read from the given io.Reader into the desired object
+func DecodeJSONFromReader(r io.Reader, out interface{}) error {
+ if r == nil {
+ return fmt.Errorf("'io.Reader' being decoded is nil")
+ }
+ if out == nil {
+ return fmt.Errorf("output parameter 'out' is nil")
+ }
+
+ dec := json.NewDecoder(r)
+
+ // While decoding JSON values, interpret the integer values as `json.Number`s instead of `float64`.
+ dec.UseNumber()
+
+ // Since 'out' is an interface representing a pointer, pass it to the decoder without an '&'
+ return dec.Decode(out)
+}
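
Similarly, a small illustrative sketch of the jsonutil round trip (again assuming the vendored import path is resolvable; not part of the diff):

package main

import (
    "fmt"

    "github.com/hashicorp/vault/helper/jsonutil"
)

func main() {
    in := map[string]interface{}{"name": "example", "count": 42}

    // Encode to JSON and compress. A nil config defaults to Gzip at
    // gzip.BestCompression, with the canary byte prepended.
    blob, err := jsonutil.EncodeJSONAndCompress(in, nil)
    if err != nil {
        panic(err)
    }

    // DecodeJSON detects the canary, decompresses, then JSON decodes.
    // Numeric values come back as json.Number because the decoder calls UseNumber().
    var out map[string]interface{}
    if err := jsonutil.DecodeJSON(blob, &out); err != nil {
        panic(err)
    }
    fmt.Printf("%#v\n", out)
}
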