chore(ext/node): port pbkdf2 to Rust (denoland#18470)
littledivy committed Mar 28, 2023
1 parent d0a0ff6 commit 67e21e7
Showing 6 changed files with 129 additions and 136 deletions.
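A quick orientation sketch (not part of the commit): the change keeps the public node:crypto surface intact while moving the key derivation itself into Rust ops. The literal inputs below are illustrative only.

import { pbkdf2, pbkdf2Sync } from "node:crypto";

// Synchronous path: backed by the new op_node_pbkdf2, which fills a
// caller-allocated buffer.
const dk = pbkdf2Sync("password", "salt", 1, 32, "sha1");
console.log(dk.toString("hex"));

// Asynchronous path: backed by op_node_pbkdf2_async, which runs the work on
// the blocking thread pool and reports the result through the callback.
pbkdf2("password", "salt", 1, 32, "sha1", (err, derivedKey) => {
  if (err) throw err;
  console.log(derivedKey?.toString("hex"));
});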
12 changes: 12 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

54 changes: 33 additions & 21 deletions cli/tests/unit_node/internal/pbkdf2_test.ts
@@ -4,7 +4,6 @@ import {
assert,
assertEquals,
} from "../../../../test_util/std/testing/asserts.ts";
import { assertCallbackErrorUncaught } from "../_test_utils.ts";

type Algorithms =
| "md5"
@@ -320,7 +319,8 @@ const fixtures: Pbkdf2Fixture[] = [
},
];

Deno.test("pbkdf2 hashes data correctly", () => {
Deno.test("pbkdf2 hashes data correctly", async () => {
const promises: Promise<void>[] = [];
fixtures.forEach(({
dkLen,
iterations,
@@ -330,23 +330,34 @@ Deno.test("pbkdf2 hashes data correctly", () => {
}) => {
for (const algorithm in results) {
if (Object.hasOwn(results, algorithm)) {
pbkdf2(
key,
salt,
iterations,
dkLen,
algorithm as Algorithms,
(err, res) => {
assert(!err, String(err));
assertEquals(
res?.toString("hex"),
results[algorithm as Algorithms],
promises.push(
new Promise((resolve, reject) => {
pbkdf2(
key,
salt,
iterations,
dkLen,
algorithm as Algorithms,
(err, res) => {
try {
assert(!err, String(err));
assertEquals(
res?.toString("hex"),
results[algorithm as Algorithms],
);
resolve();
} catch (e) {
reject(e);
}
},
);
},
}),
);
}
}
});

await Promise.all(promises);
});

Deno.test("pbkdf2Sync hashes data correctly", () => {
@@ -369,10 +380,11 @@ Deno.test("pbkdf2Sync hashes data correctly", () => {
});
});

Deno.test("[std/node/crypto] pbkdf2 callback isn't called twice if error is thrown", async () => {
const importUrl = new URL("node:crypto", import.meta.url);
await assertCallbackErrorUncaught({
prelude: `import { pbkdf2 } from ${JSON.stringify(importUrl)}`,
invocation: 'pbkdf2("password", "salt", 1, 32, "sha1", ',
});
});
// TODO(@littledivy): assertCallbackErrorUncaught exits for async operations on the thread pool.
// Deno.test("[std/node/crypto] pbkdf2 callback isn't called twice if error is thrown", async () => {
// const importUrl = new URL("node:crypto", import.meta.url);
// await assertCallbackErrorUncaught({
// prelude: `import { pbkdf2 } from ${JSON.stringify(importUrl)};`,
// invocation: 'pbkdf2("password", "salt", 1, 32, "sha1", ',
// });
// });
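Because the derivation now runs on the Rust blocking thread pool, the test wraps each callback invocation in a Promise and awaits them all before returning. Below is a hedged, standalone sketch of the same pattern; pbkdf2Promise is a hypothetical helper, not part of the diff.

import { pbkdf2 } from "node:crypto";
import { Buffer } from "node:buffer";

// Hypothetical helper mirroring the test's inline Promise wrapper: resolve on
// a successful callback, reject so the awaiting test sees assertion failures.
function pbkdf2Promise(
  password: string,
  salt: string,
  iterations: number,
  keylen: number,
  digest: string,
): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    pbkdf2(password, salt, iterations, keylen, digest, (err, derivedKey) => {
      if (err || !derivedKey) reject(err);
      else resolve(derivedKey);
    });
  });
}

// Usage inside a Deno.test body, so no work outlives the test:
//   const key = await pbkdf2Promise("password", "salt", 1, 32, "sha1");
//   assertEquals(key.toString("hex"), expectedHex);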
2 changes: 2 additions & 0 deletions ext/node/Cargo.toml
@@ -27,6 +27,7 @@ md-5 = "0.10.5"
md4 = "0.10.2"
once_cell.workspace = true
path-clean = "=0.1.0"
pbkdf2 = "0.12.1"
rand.workspace = true
regex.workspace = true
ripemd = "0.1.3"
@@ -35,4 +36,5 @@ serde = "1.0.149"
sha-1 = "0.10.0"
sha2 = "0.10.6"
sha3 = "0.10.5"
tokio.workspace = true
typenum = "1.15.0"
55 changes: 55 additions & 0 deletions ext/node/crypto/mod.rs
@@ -240,3 +240,58 @@ pub fn op_node_decipheriv_final(
.map_err(|_| type_error("Cipher context is already in use"))?;
context.r#final(input, output)
}

fn pbkdf2_sync(
password: &[u8],
salt: &[u8],
iterations: u32,
digest: &str,
derived_key: &mut [u8],
) -> Result<(), AnyError> {
macro_rules! pbkdf2_hmac {
($digest:ty) => {{
pbkdf2::pbkdf2_hmac::<$digest>(password, salt, iterations, derived_key)
}};
}

match digest {
"md4" => pbkdf2_hmac!(md4::Md4),
"md5" => pbkdf2_hmac!(md5::Md5),
"ripemd160" => pbkdf2_hmac!(ripemd::Ripemd160),
"sha1" => pbkdf2_hmac!(sha1::Sha1),
"sha224" => pbkdf2_hmac!(sha2::Sha224),
"sha256" => pbkdf2_hmac!(sha2::Sha256),
"sha384" => pbkdf2_hmac!(sha2::Sha384),
"sha512" => pbkdf2_hmac!(sha2::Sha512),
_ => return Err(type_error("Unknown digest")),
}

Ok(())
}

#[op]
pub fn op_node_pbkdf2(
password: StringOrBuffer,
salt: StringOrBuffer,
iterations: u32,
digest: &str,
derived_key: &mut [u8],
) -> bool {
pbkdf2_sync(&password, &salt, iterations, digest, derived_key).is_ok()
}

#[op]
pub async fn op_node_pbkdf2_async(
password: StringOrBuffer,
salt: StringOrBuffer,
iterations: u32,
digest: String,
keylen: usize,
) -> Result<ZeroCopyBuf, AnyError> {
tokio::task::spawn_blocking(move || {
let mut derived_key = vec![0; keylen];
pbkdf2_sync(&password, &salt, iterations, &digest, &mut derived_key)
.map(|_| derived_key.into())
})
.await?
}
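For orientation, here is a condensed sketch (an assumption, simplified from the polyfill changes later in this diff) of how these two ops are driven from JS: the sync op fills a caller-allocated Uint8Array and returns false for an unknown digest, while the async op resolves with the derived key bytes.

const { core } = globalThis.__bootstrap;
const { ops } = core;

const keylen = 32;

// Sync op: the caller allocates the output buffer; a false return means the
// digest name was not recognized.
const derivedKey = new Uint8Array(keylen);
if (!ops.op_node_pbkdf2("password", "salt", 1, "sha1", derivedKey)) {
  throw new Error("Invalid digest");
}
console.log(derivedKey);

// Async op: the work happens inside spawn_blocking; the promise resolves with
// the derived key.
const asyncKey = await core.opAsync(
  "op_node_pbkdf2_async",
  "password",
  "salt",
  1,
  "sha1",
  keylen,
);
console.log(asyncKey);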
2 changes: 2 additions & 0 deletions ext/node/lib.rs
@@ -113,6 +113,8 @@ deno_core::extension!(deno_node,
crypto::op_node_private_encrypt,
crypto::op_node_private_decrypt,
crypto::op_node_public_encrypt,
crypto::op_node_pbkdf2,
crypto::op_node_pbkdf2_async,
winerror::op_node_sys_to_uv_error,
v8::op_v8_cached_data_version_tag,
v8::op_v8_get_heap_statistics,
140 changes: 25 additions & 115 deletions ext/node/polyfills/internal/crypto/pbkdf2.ts
@@ -1,8 +1,10 @@
// Copyright 2018-2023 the Deno authors. All rights reserved. MIT license.
import { Buffer } from "ext:deno_node/buffer.ts";
import { createHash } from "ext:deno_node/internal/crypto/hash.ts";
import { HASH_DATA } from "ext:deno_node/internal/crypto/types.ts";

const { core } = globalThis.__bootstrap;
const { ops } = core;

export const MAX_ALLOC = Math.pow(2, 30) - 1;

export type NormalizedAlgorithms =
@@ -24,78 +26,6 @@ export type Algorithms =
| "sha384"
| "sha512";

const createHasher = (algorithm: string) => (value: Uint8Array) =>
Buffer.from(createHash(algorithm).update(value).digest() as Buffer);

function getZeroes(zeros: number) {
return Buffer.alloc(zeros);
}

const sizes = {
md5: 16,
sha1: 20,
sha224: 28,
sha256: 32,
sha384: 48,
sha512: 64,
rmd160: 20,
ripemd160: 20,
};

function toBuffer(bufferable: HASH_DATA) {
if (bufferable instanceof Uint8Array || typeof bufferable === "string") {
return Buffer.from(bufferable as Uint8Array);
} else {
return Buffer.from(bufferable.buffer);
}
}

export class Hmac {
hash: (value: Uint8Array) => Buffer;
ipad1: Buffer;
opad: Buffer;
alg: string;
blocksize: number;
size: number;
ipad2: Buffer;

constructor(alg: Algorithms, key: Buffer, saltLen: number) {
this.hash = createHasher(alg);

const blocksize = alg === "sha512" || alg === "sha384" ? 128 : 64;

if (key.length > blocksize) {
key = this.hash(key);
} else if (key.length < blocksize) {
key = Buffer.concat([key, getZeroes(blocksize - key.length)], blocksize);
}

const ipad = Buffer.allocUnsafe(blocksize + sizes[alg]);
const opad = Buffer.allocUnsafe(blocksize + sizes[alg]);
for (let i = 0; i < blocksize; i++) {
ipad[i] = key[i] ^ 0x36;
opad[i] = key[i] ^ 0x5c;
}

const ipad1 = Buffer.allocUnsafe(blocksize + saltLen + 4);
ipad.copy(ipad1, 0, 0, blocksize);

this.ipad1 = ipad1;
this.ipad2 = ipad;
this.opad = opad;
this.alg = alg;
this.blocksize = blocksize;
this.size = sizes[alg];
}

run(data: Buffer, ipad: Buffer) {
data.copy(ipad, this.blocksize);
const h = this.hash(ipad);
h.copy(this.opad, this.blocksize);
return this.hash(this.opad);
}
}

/**
* @param iterations Needs to be higher or equal than zero
* @param keylen Needs to be higher or equal than zero but less than max allocation size (2^30)
@@ -115,35 +45,12 @@ export function pbkdf2Sync(
throw new TypeError("Bad key length");
}

const bufferedPassword = toBuffer(password);
const bufferedSalt = toBuffer(salt);

const hmac = new Hmac(digest, bufferedPassword, bufferedSalt.length);

const DK = Buffer.allocUnsafe(keylen);
const block1 = Buffer.allocUnsafe(bufferedSalt.length + 4);
bufferedSalt.copy(block1, 0, 0, bufferedSalt.length);

let destPos = 0;
const hLen = sizes[digest];
const l = Math.ceil(keylen / hLen);

for (let i = 1; i <= l; i++) {
block1.writeUInt32BE(i, bufferedSalt.length);

const T = hmac.run(block1, hmac.ipad1);
let U = T;

for (let j = 1; j < iterations; j++) {
U = hmac.run(U, hmac.ipad2);
for (let k = 0; k < hLen; k++) T[k] ^= U[k];
}

T.copy(DK, destPos);
destPos += hLen;
const DK = new Uint8Array(keylen);
if (!ops.op_node_pbkdf2(password, salt, iterations, digest, DK)) {
throw new Error("Invalid digest");
}

return DK;
return Buffer.from(DK);
}

/**
@@ -159,24 +66,27 @@ export function pbkdf2(
digest: Algorithms = "sha1",
callback: (err: Error | null, derivedKey?: Buffer) => void,
) {
setTimeout(() => {
let err = null,
res;
try {
res = pbkdf2Sync(password, salt, iterations, keylen, digest);
} catch (e) {
err = e;
}
if (err) {
callback(err instanceof Error ? err : new Error("[non-error thrown]"));
} else {
callback(null, res);
}
}, 0);
if (typeof iterations !== "number" || iterations < 0) {
throw new TypeError("Bad iterations");
}
if (typeof keylen !== "number" || keylen < 0 || keylen > MAX_ALLOC) {
throw new TypeError("Bad key length");
}

core.opAsync(
"op_node_pbkdf2_async",
password,
salt,
iterations,
digest,
keylen,
).then(
(DK) => callback(null, Buffer.from(DK)),
)
.catch((err) => callback(err));
}

export default {
Hmac,
MAX_ALLOC,
pbkdf2,
pbkdf2Sync,

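One behavioral note worth a sketch (hedged, derived from the pbkdf2 polyfill above): argument validation stays synchronous in JS, while digest validation now surfaces from the Rust op through the callback.

import { pbkdf2 } from "node:crypto";

try {
  // Rejected in JS before any op is dispatched: throws TypeError("Bad iterations").
  pbkdf2("password", "salt", -1, 32, "sha1", () => {});
} catch (err) {
  console.error(err);
}

// An unrecognized digest is reported through the callback, because
// op_node_pbkdf2_async rejects with "Unknown digest".
pbkdf2("password", "salt", 1, 32, "not-a-digest", (err) => {
  console.error(err);
});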