refactor: extract api token hash and equality checks

As we need these functions for the seeding, it makes sense to extract them into password.ts and test them accordingly.

Signed-off-by: Philip Molares <philip.molares@udo.edu>

parent b696c1e661
commit 539ea7e33b

3 changed files with 78 additions and 18 deletions
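To make the motivation concrete, here is a minimal sketch of how a seed script could use the extracted helpers to prepare a token record; the `seedApiToken` name, the relative import path, and the secret length are assumptions for illustration, not part of this commit:

```typescript
// Hypothetical seeding snippet; the function name, import path and secret length are assumptions.
import { randomBytes } from 'crypto';

import { bufferToBase64Url, hashApiToken } from './utils/password';

// Generate a fresh secret, keep only its SHA-512 hex hash for the database row,
// and hand the cleartext secret back exactly once (e.g. to print it for the seeded user).
export function seedApiToken(): { secret: string; hash: string } {
  const secret = bufferToBase64Url(randomBytes(64));
  return { secret, hash: hashApiToken(secret) };
}
```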
api-token.service.ts

@@ -17,7 +17,7 @@ import {
   TooManyTokensError,
 } from '../errors/errors';
 import { ConsoleLoggerService } from '../logger/console-logger.service';
-import { bufferToBase64Url } from '../utils/password';
+import { bufferToBase64Url, checkTokenEquality } from '../utils/password';
 import { ApiToken } from './api-token.entity';
 
 export const AUTH_TOKEN_PREFIX = 'hd2';
@@ -47,7 +47,7 @@ export class ApiTokenService {
     const token = await this.getToken(keyId);
     this.checkToken(secret, token);
     await this.setLastUsedToken(keyId);
-    return await token.user;
+    return token.user;
   }
 
   createToken(
@@ -126,16 +126,7 @@ export class ApiTokenService {
   }
 
   checkToken(secret: string, token: ApiToken): void {
-    const userHash = Buffer.from(
-      createHash('sha512').update(secret).digest('hex'),
-    );
-    const dbHash = Buffer.from(token.hash);
-    if (
-      // Normally, both hashes have the same length, as they are both SHA512
-      // This is only defense-in-depth, as timingSafeEqual throws if the buffers are not of the same length
-      userHash.length !== dbHash.length ||
-      !timingSafeEqual(userHash, dbHash)
-    ) {
+    if (!checkTokenEquality(secret, token.hash)) {
      // hashes are not the same
       throw new TokenNotValidError(
         `Secret does not match Token ${token.label}.`,
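The comments removed from checkToken (and kept in the new helper below) point out that timingSafeEqual throws when the buffers differ in length, which is why the length check stays as defense-in-depth. A standalone sketch of that Node behaviour, not code from this commit:

```typescript
// crypto.timingSafeEqual requires buffers of equal byte length; otherwise it throws.
import { timingSafeEqual } from 'crypto';

const a = Buffer.from('aa');
const b = Buffer.from('aaa');

try {
  timingSafeEqual(a, b);
} catch (error) {
  // Prints something like: "Input buffers must have the same byte length"
  console.log((error as Error).message);
}
```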
password.spec.ts

@@ -1,11 +1,18 @@
 /*
- * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2025 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
 import argon2 from '@node-rs/argon2';
+import { randomBytes } from 'crypto';
 
-import { bufferToBase64Url, checkPassword, hashPassword } from './password';
+import {
+  bufferToBase64Url,
+  checkPassword,
+  checkTokenEquality,
+  hashApiToken,
+  hashPassword,
+} from './password';
 
 const testPassword = 'thisIsATestPassword';
 const hashOfTestPassword =
@@ -73,3 +80,25 @@ describe('bufferToBase64Url', () => {
     ).toEqual('dGVzdHNlbnRlbmNlIGlzIGEgdGVzdCBzZW50ZW5jZQ');
   });
 });
+
+describe('hashApiToken', () => {
+  it('correctly hashes a string', () => {
+    const testToken =
+      'LaD52wgw7pi5zVitv4gR5lxoUa6ncTQGASPmXDSdppB9xcd9kCtqjlrdQ8OOfmG9DNXGvfkIwaOCAv8nRp8IoQ';
+    expect(hashApiToken(testToken)).toEqual(
+      'd820de9eb5ace767c14c02f61b9522485f565201443fd366e6ca0d8a18dcffecf91cb27911b8cac566c3aaced44d02b0441a3b72380479f69eaea0f12e4bd73b',
+    );
+  });
+});
+
+describe('checkTokenEquality', () => {
+  const testToken =
+    'q72OIg1Y0sKvtsRmxtl86AwWfAF1V7LbVFt5PS0k73iyv3DtpG7Fdn2CADBlq5NsnSWMxGzYLeyux0cdFULmiw';
+  const hashedTestToken = hashApiToken(testToken);
+  it('returns true if the token hashes are the same', () => {
+    expect(checkTokenEquality(testToken, hashedTestToken)).toEqual(true);
+  });
+  it('returns false if the token hashes are not the same', () => {
+    expect(checkTokenEquality(testToken, hashApiToken('test'))).toEqual(false);
+  });
+});
password.ts

@@ -1,9 +1,10 @@
 /*
- * SPDX-FileCopyrightText: 2024 The HedgeDoc developers (see AUTHORS file)
+ * SPDX-FileCopyrightText: 2025 The HedgeDoc developers (see AUTHORS file)
  *
  * SPDX-License-Identifier: AGPL-3.0-only
  */
 import { hash, verify } from '@node-rs/argon2';
+import { createHash, timingSafeEqual } from 'crypto';
 
 /**
  * Hashes a password using argon2id
@@ -35,13 +36,52 @@ export async function checkPassword(
   return await verify(passwordHash, cleartext);
 }
 
+/**
+ * Transform a {@link Buffer} into a base64Url encoded string
+ *
+ * This is necessary as there is no base64url encoding in the toString method,
+ * but as can be seen on https://tools.ietf.org/html/rfc4648#page-7
+ * base64url can easily be built from base64
+ *
+ * @param text The buffer we want to decode
+ * @returns The base64Url encoded string
+ */
 export function bufferToBase64Url(text: Buffer): string {
-  // This is necessary as the is no base64url encoding in the toString method
-  // but as can be seen on https://tools.ietf.org/html/rfc4648#page-7
-  // base64url is quite easy buildable from base64
   return text
     .toString('base64')
     .replace(/\+/g, '-')
     .replace(/\//g, '_')
     .replace(/=+$/, '');
 }
+
+/**
+ * Hash an api token.
+ *
+ * @param token the token to be hashed
+ * @returns the hashed token
+ */
+export function hashApiToken(token: string): string {
+  return createHash('sha512').update(token).digest('hex');
+}
+
+/**
+ * Check if the given token is the same as what we have in the database.
+ *
+ * Normally, both hashes have the same length, as they are both SHA512.
+ * This is only defense-in-depth, as timingSafeEqual throws if the buffers are not of the same length.
+ *
+ * @param givenToken The token the user gave us.
+ * @param databaseToken The token we have saved in the database.
+ * @returns Whether or not the tokens are equal
+ */
+export function checkTokenEquality(
+  givenToken: string,
+  databaseToken: string,
+): boolean {
+  const givenHash = Buffer.from(hashApiToken(givenToken));
+  const databaseHash = Buffer.from(databaseToken);
+  return (
+    databaseHash.length === givenHash.length &&
+    timingSafeEqual(givenHash, databaseHash)
+  );
+}
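As a quick sanity check, the two extracted helpers round-trip as follows; this usage sketch is not part of the diff and assumes it runs next to password.ts:

```typescript
// Round-trip sketch for the extracted helpers (illustration only).
import { randomBytes } from 'crypto';

import {
  bufferToBase64Url,
  checkTokenEquality,
  hashApiToken,
} from './password';

const secret = bufferToBase64Url(randomBytes(64)); // 86-character base64url string
const storedHash = hashApiToken(secret); // 128-character SHA-512 hex digest

console.log(checkTokenEquality(secret, storedHash)); // true
console.log(checkTokenEquality('some-other-secret', storedHash)); // false
```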