I am trying to write out a library that mimics the functionality of the check identity server 3in nodeJS, but I'm struggling to check the created buffer.
- I'm not sure why, but I get a buffer of various lengths, despite the fact that I am following along with what, in my opinion, is equivalent.
- A function
pbkdf2executed as an async task can have different iteration behavior. - The function
pbkdf2may implement a different version of sha256 or it may simply not be hmac. - I messed up the buffer management and spit between the salt / child layer.
- Copying in this sense may not work as
blockcopyin the above application fromidentity server 3
Although note that the hash that I am trying to verify is taken directly from identity server 3inside a separate application that was launched from ABP boilerplate, but from my own research I do not believe that they will implement their own hashing algorithm or change the settings. The code link c#I used for the conversion can be found here:
https://github.com/aspnet/Identity/blob/rel/2.0.0/src/Microsoft.Extensions.Identity.Core/PasswordHasher.cs#L248
With some further research into the equivalent server 2 equivalent, which uses a more conventional algorithm for verification, I noticed that people are reporting that they needed to change the encoding, but this still does not help with testing.
hashpassword, , , 61, - 84, -, .
.
import crypto from 'crypto';
import util from 'util';
const pbkdf2Async = util.promisify(crypto.pbkdf2);
export default class HashPasswordv3 {
async verifyPassword(password, hashedPassword) {
let decodedBuffer = null;
if (hashedPassword) {
decodedBuffer = Buffer.from(hashedPassword, 'base64');
}
let iteration = 10000;
let key = decodedBuffer[0];
let saltLength = this.readNetworkByteOrder(decodedBuffer, 9);
if (saltLength < 128 / 8) {
return false;
}
let salt = new Buffer(saltLength);
decodedBuffer.copy(salt, 13, 0, saltLength);
console.log(salt);
let subkeyLength = hashedPassword.length - 13 - saltLength;
if (subkeyLength < 128 / 8) {
return false;
}
let expectedSubkey = new Buffer(subkeyLength);
decodedBuffer.copy(expectedSubkey, 0, 13 + saltLength, expectedSubkey.length);
console.log(expectedSubkey);
let acutalSubkey = await pbkdf2Async(password, salt, 10000, 32, 'sha256');
console.log(acutalSubkey);
console.log(this.areBuffersEqual(acutalSubkey, expectedSubkey));
}
async hashPassword(password) {
try {
let salt = await crypto.randomBytes(16);
let subkey = await pbkdf2Async(password, salt, 10000, 32, 'sha256');
let outputBytes = new Buffer(13 + salt.length + subkey.length);
outputBytes[0] = 0x01;
this.writeNetworkByteOrder(outputBytes, 1, 1);
this.writeNetworkByteOrder(outputBytes, 5, 10000);
this.writeNetworkByteOrder(outputBytes, 9, salt.length);
salt.copy(outputBytes, 13, 0, 16);
subkey.copy(outputBytes, 13 + salt.length, 0, subkey.length);
console.log(outputBytes.toString('base64'));
} catch (e) {
console.log(e);
}
}
writeNetworkByteOrder(buffer, offset, value) {
buffer[offset + 0] = value >> 0;
buffer[offset + 1] = value >> 8;
buffer[offset + 2] = value >> 16;
buffer[offset + 3] = value >> 24;
}
readNetworkByteOrder(buffer, offset) {
return ((buffer[offset + 0]) << 24)
| ((buffer[offset + 1]) << 16)
| ((buffer[offset + 2]) << 8)
| ((buffer[offset + 3]));
}
byteArraysEqual(a, b) {
if (Buffer.compare(a, b)) {
return true;
}
if (a == null || b == null || a.Length !== b.Length) {
return false;
}
let areSame = true;
for (let i = 0; i < a.Length; i++) {
areSame &= (a[i] === b[i]);
}
return areSame;
}
areBuffersEqual(bufA, bufB) {
let len = bufA.length;
if (len !== bufB.length) {
return false;
}
for (let i = 0; i < len; i++) {
if (bufA.readUInt8(i) !== bufB.readUInt8(i)) {
return false;
}
}
return true;
}
}
:
import identityHasher from '../IdentityServer3/HashPasswordv3';
const hasher = new identityHasher();
let result = await hasher.verifyPassword('test', 'AQAAAAEAACcQAAAAEGKKbVuUwa4Y6qIclGpTE95X6wSw0mdwhMjXMBpAnHrjrQlHngJCgeuTf52w91UruA==');