Supplementary Go cryptography libraries: https://godoc.org/maze.io/x/crypto

afis.go (2.8 KB)

package afis // import "maze.io/x/crypto/afis"

import (
	"crypto/rand"
	"crypto/sha1"
	"encoding/binary"
	"errors"
	"hash"
	"io"
	"math"
)

// Errors.
var (
	ErrMinStripe = errors.New("afis: at least one stripe is required")
	ErrDataLen   = errors.New("afis: data length is not multiple of stripes")
)

// DefaultHash is our default hashing function.
var DefaultHash = sha1.New

// Split data using the default SHA-1 hash.
func Split(data []byte, stripes int) ([]byte, error) {
	return SplitHash(data, stripes, DefaultHash)
}

// SplitHash splits data using the selected hash function.
func SplitHash(data []byte, stripes int, hashFunc func() hash.Hash) ([]byte, error) {
	if stripes < 1 {
		return nil, ErrMinStripe
	}
	var (
		blockSize = len(data)
		block     = make([]byte, blockSize)
		random    = make([]byte, blockSize)
		splitted  []byte
	)
	// Emit stripes-1 blocks of random data, folding each one into the
	// running block through XOR and diffusion.
	for i := 0; i < stripes-1; i++ {
		if _, err := io.ReadFull(rand.Reader, random); err != nil {
			return nil, err
		}
		splitted = append(splitted, random...)
		xor(block, random, block)
		block = diffuse(block, blockSize, hashFunc)
	}
	// The final stripe is the running block XORed with the original data,
	// so that merging all stripes recovers it.
	size := len(splitted)
	splitted = append(splitted, make([]byte, blockSize)...)
	xor(splitted[size:], block, data)
	return splitted, nil
}

// Merge data previously split with Split, using the default SHA-1 hash.
func Merge(data []byte, stripes int) ([]byte, error) {
	return MergeHash(data, stripes, DefaultHash)
}

// MergeHash merges previously split data using the selected hash function.
func MergeHash(data []byte, stripes int, hashFunc func() hash.Hash) ([]byte, error) {
	if len(data)%stripes != 0 {
		return nil, ErrDataLen
	}
	var (
		blockSize = len(data) / stripes
		block     = make([]byte, blockSize)
	)
	// Replay the XOR and diffusion steps over the first stripes-1 blocks.
	for i := 0; i < stripes-1; i++ {
		offset := i * blockSize
		xor(block, data[offset:offset+blockSize], block)
		block = diffuse(block, blockSize, hashFunc)
	}
	// XOR with the final stripe to recover the original data.
	xor(block, data[(stripes-1)*blockSize:], block)
	return block, nil
}

// xor writes the byte-wise XOR of src1 and src2 into dst.
func xor(dst, src1, src2 []byte) {
	for i := range dst {
		dst[i] = src1[i] ^ src2[i]
	}
}

// diffuse hashes block in digest-sized chunks, prefixing each chunk with its
// big-endian index, and concatenates the digests into an output of the same
// length as the input block.
func diffuse(block []byte, size int, hashFunc func() hash.Hash) []byte {
	var (
		hash       = hashFunc()
		digestSize = hash.Size()
		blocks     = int(math.Floor(float64(len(block)) / float64(digestSize)))
		padding    = len(block) % digestSize
		diffused   []byte
	)
	// Hash full blocks.
	for i := 0; i < blocks; i++ {
		offset := i * digestSize
		hash.Reset()
		hash.Write(packInt(i))
		hash.Write(block[offset : offset+digestSize])
		diffused = append(diffused, hash.Sum(nil)...)
	}
	// Hash the remainder, keeping only as many digest bytes as are needed.
	if padding > 0 {
		hash.Reset()
		hash.Write(packInt(blocks))
		hash.Write(block[blocks*digestSize:])
		diffused = append(diffused, hash.Sum(nil)[:padding]...)
	}
	return diffused
}

// packInt encodes i as a 4-byte big-endian unsigned integer.
func packInt(i int) []byte {
	var packed [4]byte
	binary.BigEndian.PutUint32(packed[:], uint32(i))
	return packed[:]
}