[bin] renamed to [lib] | [lib.hash] added with tests for cyclic hashing with xor (algorithm copied from the PHP version)
parent 1da9ede808
commit 6363cd41aa
@@ -0,0 +1,72 @@
package hash

import (
	"crypto/sha512"

	"git.xdrm.io/schastsp/lib/xor"
)

/* (0) Constants
---------------------------------------------------------*/
/* (1) Hash size */
const HBITSIZE uint = 512
const HSIZE uint = HBITSIZE / 8

/* (1) Basic hash function
 *
 * @input<[]byte>  Byte array input
 *
 * @return digest<[]byte>  Byte array digest
 *
---------------------------------------------------------*/
func hash(input []byte) []byte {

	/* (1) Create sha512 hasher */
	hasher := sha512.New()

	/* (2) Set input to be hashed */
	hasher.Write(input)

	/* (3) Extract digest */
	return hasher.Sum(nil)

	// digest := base64.StdEncoding.EncodeToString(container)
	// fmt.Println(digest)
}

/* (2) Public hashing interface
 *
 * @input<[]byte>   Byte array input
 * @depth<uint>     Number of hash iterations (expected to be >= 1)
 * @salt<[]byte>    Xor-ed with @input before the first hash
 * @pepper<[]byte>  Xor-ed with the digest before the last hash (only applied when @depth >= 2)
 *
 * @return digest<[]byte>  Byte array digest
 *
---------------------------------------------------------*/
func Hash(input []byte, depth uint, salt []byte, pepper []byte) []byte {

	/* (1) Initialise digest */
	digest := make([]byte, 0, HSIZE)

	/* (2) Process first hash with @salt */
	digest = hash(xor.ByteArray(input, salt))
	depth--

	/* (3) Iterate @depth times */
	for depth > 0 {

		// 1. Add pepper for the last iteration
		if depth == 1 {
			digest = hash(xor.ByteArray(digest, pepper))

		// 2. Else only hash
		} else {
			digest = hash(digest)
		}

		depth--
	}

	return digest
}
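For context, a minimal usage sketch of the new package follows (not part of this commit). The import path git.xdrm.io/schastsp/lib/hash and the behaviour of xor.ByteArray (cycling the shorter slice over the longer one) are assumptions inferred from the imports above; depth should be at least 1, since Hash decrements the unsigned counter before looping.

// Hypothetical usage sketch — package path and parameter values are illustrative only.
package main

import (
	"fmt"

	"git.xdrm.io/schastsp/lib/hash"
)

func main() {
	secret := []byte("somePlainText")
	salt := []byte("someSalt")
	pepper := []byte("somePepper")

	// 3 chained sha512 rounds: the salt is xor-ed in before the first round,
	// the pepper before the last one.
	digest := hash.Hash(secret, 3, salt, pepper)

	fmt.Printf("%x\n", digest) // 64-byte (512-bit) digest
}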
@@ -0,0 +1,70 @@
package hash

import "testing"

func TestSimpleHash(t *testing.T) {

	input := []byte("somePlainText")
	expected := []byte{
		0x4c, 0xcb, 0x0e, 0xf6, 0x81, 0x99, 0x2e, 0xd6, 0xb8, 0x17, 0x52, 0x1d, 0x09,
		0x6e, 0x99, 0x19, 0xe7, 0xda, 0x50, 0xc8, 0xbf, 0x64, 0xae, 0xc1, 0x4f, 0xaa,
		0x47, 0x06, 0xf3, 0x49, 0x30, 0x8a, 0x90, 0x8e, 0xd2, 0xff, 0xc2, 0x6d, 0xee,
		0xaa, 0xd6, 0x45, 0xd8, 0xb3, 0x17, 0xe3, 0xb9, 0x45, 0x29, 0x26, 0xe2, 0x8e,
		0x99, 0x50, 0x94, 0x49, 0x90, 0x02, 0xa5, 0x61, 0x4a, 0x3f, 0x5e, 0xfa,
	}
	byte0 := make([]byte, 0)
	got := Hash(input, 1, byte0, byte0)
	got_len := uint(len(got))

	/* (1) Error if wrong size */
	if got_len != HSIZE {
		t.Errorf("Expected hash digest of %d bytes ; %d bytes received", HSIZE, got_len)
	}

	/* (2) Check each byte */
	for k, v := range got {
		if v != expected[k] {
			t.Errorf("Expected sha[%d] of '%x' to be '%x' ; received '%x'", HSIZE, input, expected, got)
			return
		}
	}
}

func TestDepthConsistence(t *testing.T) {

	input := []byte("someOtherPlainText")
	byte0 := make([]byte, 0)

	var tests = []struct {
		got      []byte
		expected []byte
	}{
		{Hash(Hash(input, 1, byte0, byte0), 1, byte0, byte0), Hash(input, 2, byte0, byte0)},
		{Hash(Hash(input, 1, byte0, byte0), 1000, byte0, byte0), Hash(input, 1001, byte0, byte0)},
		{Hash(Hash(input, 1000, byte0, byte0), 1, byte0, byte0), Hash(input, 1001, byte0, byte0)},
	}

	/* (1) For each case */
	for _, test := range tests {

		/* (2) Check each byte */
		for k, v := range test.got {
			if v != test.expected[k] {
				t.Errorf("Expected '%x' ; received '%x'", test.expected, test.got)
				return
			}
		}
	}
}
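The property behind TestDepthConsistence can be stated as a standalone check. The sketch below is not part of the commit and assumes that xor.ByteArray leaves its input unchanged when the salt/pepper slice is empty, so that Hash with empty salt and pepper reduces to sha512 iterated depth times.

package hash

import "bytes"

// chainingHolds is a hypothetical helper illustrating the invariant the test relies on:
// hashing n times and then m more times equals hashing n+m times in a single call
// (assuming xor.ByteArray(x, empty) == x).
func chainingHolds(input []byte, n, m uint) bool {
	empty := make([]byte, 0)
	chained := Hash(Hash(input, n, empty, empty), m, empty, empty) // n rounds, then m more
	direct := Hash(input, n+m, empty, empty)                       // n+m rounds at once
	return bytes.Equal(chained, direct)
}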