
WIP: prepare to switch to kdbx v4 by default.

pull/3/head
Herbert Poul, 4 years ago
commit b6b8f5c6a7
1. README.md (2 changed lines)
2. lib/src/kdbx_file.dart (9 changed lines)
3. lib/src/kdbx_format.dart (23 changed lines)
4. lib/src/kdbx_header.dart (199 changed lines)
5. lib/src/kdbx_meta.dart (4 changed lines)
6. lib/src/kdbx_xml.dart (6 changed lines)
7. test/kdbx_test.dart (4 changed lines)

README.md (2 changed lines)

@@ -28,6 +28,8 @@ https://github.com/authpass/argon2_ffi
* argon2_ffi/ios/Classes
* `cmake . && cmake --build .`
* `cp libargon2_ffi.dylib kdbx.dart/`
* Might need to run: `codesign --remove-signature /usr/local/bin/dart`
https://github.com/dart-lang/sdk/issues/39231#issuecomment-579743656
* Linux:
* argon2_ffi/ios/Classes
* `cmake . && cmake --build .`

lib/src/kdbx_file.dart (9 changed lines)

@@ -10,6 +10,7 @@ import 'package:kdbx/src/kdbx_group.dart';
import 'package:kdbx/src/kdbx_header.dart';
import 'package:kdbx/src/kdbx_object.dart';
import 'package:logging/logging.dart';
import 'package:quiver/check.dart';
import 'package:xml/xml.dart' as xml;
final _logger = Logger('kdbx_file');
@@ -109,6 +110,14 @@ class KdbxFile {
KdbxGroup getRecycleBinOrCreate() {
return recycleBin ?? _createRecycleBin();
}
/// Upgrade v3 file to v4.
void upgrade(int majorVersion) {
checkArgument(majorVersion == 4, message: 'Must be majorVersion 4');
body.meta.settingsChanged.setToNow();
body.meta.headerHash.remove();
header.upgrade(majorVersion);
}
}
class CachedValue<T> {
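A quick usage sketch for the new `upgrade()` entry point (not part of this commit; `format`, `bytes`, and `credentials` are placeholders, and it assumes `KdbxVersion` is exported via `package:kdbx/kdbx.dart`):

```dart
import 'dart:typed_data';

import 'package:kdbx/kdbx.dart';

// Sketch only: read a database and upgrade it to KDBX 4 if it is still v3.
Future<KdbxFile> readAndUpgrade(
    KdbxFormat format, Uint8List bytes, Credentials credentials) async {
  final file = await format.read(bytes, credentials);
  if (file.header.version < KdbxVersion.V4) {
    // Delegates to KdbxHeader.upgrade and updates Meta (see the hunk above).
    file.upgrade(KdbxVersion.V4.major);
  }
  return file;
}
```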

lib/src/kdbx_format.dart (23 changed lines)

@@ -94,7 +94,7 @@ class KdbxReadWriteContext {
final KdbxHeader header;
int get versionMajor => header.versionMajor;
int get versionMajor => header.version.major;
KdbxBinary binaryById(int id) {
if (id >= _binaries.length) {
@@ -327,13 +327,15 @@ class KdbxFormat {
final Argon2 argon2;
static bool dartWebWorkaround = false;
/// Creates a new, empty [KdbxFile] with default settings.
/// If [header] is not given, a kdbx 4.0 file will be created by default.
KdbxFile create(
Credentials credentials,
String name, {
String generator,
KdbxHeader header,
}) {
header ??= KdbxHeader.create();
header ??= KdbxHeader.createV3();
final ctx = KdbxReadWriteContext(binaries: [], header: header);
final meta = KdbxMeta.create(
databaseName: name,
@@ -354,14 +356,14 @@ class KdbxFormat {
Future<KdbxFile> read(Uint8List input, Credentials credentials) async {
final reader = ReaderHelper(input);
final header = KdbxHeader.read(reader);
if (header.versionMajor == 3) {
if (header.version.major == KdbxVersion.V3.major) {
return await _loadV3(header, reader, credentials);
} else if (header.versionMajor == 4) {
} else if (header.version.major == KdbxVersion.V4.major) {
return await _loadV4(header, reader, credentials);
} else {
_logger.finer('Unsupported version for $header');
throw KdbxUnsupportedException('Unsupported kdbx version '
'${header.versionMajor}.${header.versionMinor}.'
'${header.version}.'
' Only 3.x and 4.x are supported.');
}
}
@@ -377,14 +379,16 @@ class KdbxFormat {
final headerHash =
(crypto.sha256.convert(writer.output.toBytes()).bytes as Uint8List);
if (file.header.versionMajor <= 3) {
if (file.header.version < KdbxVersion.V3) {
throw UnsupportedError('Unsupported version ${header.version}');
} else if (file.header.version < KdbxVersion.V4) {
final streamKey =
file.header.fields[HeaderFields.ProtectedStreamKey].bytes;
final gen = ProtectedSaltGenerator(streamKey);
body.meta.headerHash.set(headerHash.buffer);
await body.writeV3(writer, file, gen);
} else if (header.versionMajor <= 4) {
} else if (header.version.major == KdbxVersion.V4.major) {
final headerBytes = writer.output.toBytes();
writer.writeBytes(headerHash);
final gen = _createProtectedSaltGenerator(header);
@@ -393,7 +397,7 @@ class KdbxFormat {
writer.writeBytes(headerHmac.bytes as Uint8List);
body.writeV4(writer, file, gen, keys);
} else {
throw UnsupportedError('Unsupported version ${header.versionMajor}');
throw UnsupportedError('Unsupported version ${header.version}');
}
file.onSaved();
return output.toBytes();
@@ -450,7 +454,8 @@ class KdbxFormat {
if (header.compression == Compression.gzip) {
final content = KdbxFormat._gzipDecode(decrypted);
final contentReader = ReaderHelper(content);
final innerHeader = KdbxHeader.readInnerHeaderFields(contentReader, 4);
final innerHeader =
KdbxHeader.readInnerHeaderFields(contentReader, header.version);
// _logger.fine('inner header fields: $headerFields');
// header.innerFields.addAll(headerFields);
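For orientation, a minimal sketch of creating a database with these defaults (not part of the commit; it assumes `Argon2`, `Credentials`, and `KdbxVersion` are reachable through `package:kdbx/kdbx.dart`). Note the default header is still the 3.1 variant in this WIP state:

```dart
import 'package:kdbx/kdbx.dart';

// Sketch only: with no explicit header, create() falls back to
// KdbxHeader.createV3(), i.e. a KDBX 3.1 file, in this commit.
KdbxFile createDatabase(Argon2 argon2, Credentials credentials) {
  final format = KdbxFormat(argon2);
  final file = format.create(credentials, 'Example');
  assert(file.header.version == KdbxVersion.V3_1);
  return file;
}
```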

lib/src/kdbx_header.dart (199 changed lines)

@@ -7,9 +7,10 @@ import 'package:kdbx/src/internal/byte_utils.dart';
import 'package:kdbx/src/internal/consts.dart';
import 'package:kdbx/src/kdbx_binary.dart';
import 'package:kdbx/src/kdbx_var_dictionary.dart';
import 'package:kdbx/src/utils/scope_functions.dart';
import 'package:logging/logging.dart';
import 'package:meta/meta.dart';
import 'package:quiver/check.dart';
import 'package:quiver/core.dart';
final _logger = Logger('kdbx.header');
@@ -31,6 +32,16 @@ enum Compression {
/// id: 1
gzip,
}
const _compressionIds = {
Compression.none: 0,
Compression.gzip: 1,
};
final _compressionIdsById =
_compressionIds.map((key, value) => MapEntry(value, key));
extension on Compression {
int get id => _compressionIds[this];
}
/// how protected values are encrypted in the xml.
enum ProtectedValueEncryption { plainText, arc4variant, salsa20, chaCha20 }
@@ -51,6 +62,43 @@ enum HeaderFields {
PublicCustomData,
}
class KdbxVersion {
const KdbxVersion._(this.major, this.minor);
static const V3 = KdbxVersion._(3, 0);
static const V3_1 = KdbxVersion._(3, 1);
static const V4 = KdbxVersion._(4, 0);
final int major;
final int minor;
bool operator <(KdbxVersion other) =>
major < other.major || (major == other.major && minor < other.minor);
bool operator >(KdbxVersion other) =>
major > other.major || (major == other.major && minor > other.minor);
bool operator >=(KdbxVersion other) => this == other || this > other;
@override
bool operator ==(Object other) =>
other is KdbxVersion && major == other.major && minor == other.minor;
@override
int get hashCode => hash2(major, minor);
@override
String toString() => '$major.$minor';
}
const _headerFieldsByVersion = {
HeaderFields.TransformSeed: [KdbxVersion.V3],
HeaderFields.TransformRounds: [KdbxVersion.V3],
HeaderFields.ProtectedStreamKey: [KdbxVersion.V3],
HeaderFields.StreamStartBytes: [KdbxVersion.V3],
HeaderFields.InnerRandomStreamID: [KdbxVersion.V3],
};
enum InnerHeaderFields {
EndOfHeader,
InnerRandomStreamID,
@@ -86,19 +134,18 @@ class KdbxHeader {
KdbxHeader({
@required this.sig1,
@required this.sig2,
@required this.versionMinor,
@required this.versionMajor,
@required KdbxVersion version,
@required this.fields,
@required this.endPos,
Map<InnerHeaderFields, InnerHeaderField> innerFields,
}) : innerHeader = InnerHeader(fields: innerFields ?? {});
}) : _version = version,
innerHeader = InnerHeader(fields: innerFields ?? {});
KdbxHeader.create()
KdbxHeader.createV3()
: this(
sig1: Consts.FileMagic,
sig2: Consts.Sig2Kdbx,
versionMinor: 1,
versionMajor: 3,
version: KdbxVersion.V3_1,
fields: _defaultFieldValues(),
endPos: null,
);
@@ -107,15 +154,15 @@
: this(
sig1: Consts.FileMagic,
sig2: Consts.Sig2Kdbx,
versionMinor: 1,
versionMajor: 4,
version: KdbxVersion.V4,
fields: _defaultFieldValuesV4(),
innerFields: _defaultInnerFieldValuesV4(),
endPos: null,
);
// TODO: use KdbxVersion
static List<HeaderFields> _requiredFields(int majorVersion) {
if (majorVersion < 3) {
if (majorVersion < KdbxVersion.V3.major) {
throw KdbxUnsupportedException('Unsupported version: $majorVersion');
}
final baseHeaders = [
@@ -124,7 +171,7 @@ class KdbxHeader {
HeaderFields.MasterSeed,
HeaderFields.EncryptionIV,
];
if (majorVersion < 4) {
if (majorVersion < KdbxVersion.V4.major) {
return baseHeaders +
[
HeaderFields.TransformSeed,
@@ -151,7 +198,7 @@ class KdbxHeader {
}
void _validate() {
for (final required in _requiredFields(versionMajor)) {
for (final required in _requiredFields(version.major)) {
if (fields[required] == null) {
throw KdbxCorruptedFileException('Missing header $required');
}
@@ -182,13 +229,13 @@ class KdbxHeader {
// TODO check whether the default algorithm is "secure" enough, or whether we
// should use something like [SecureRandom] from PointyCastle.
_setHeaderField(HeaderFields.MasterSeed, ByteUtils.randomBytes(32));
if (versionMajor < 4) {
if (version.major == KdbxVersion.V3.major) {
_setHeaderField(HeaderFields.TransformSeed, ByteUtils.randomBytes(32));
_setHeaderField(HeaderFields.StreamStartBytes, ByteUtils.randomBytes(32));
_setHeaderField(
HeaderFields.ProtectedStreamKey, ByteUtils.randomBytes(32));
_setHeaderField(HeaderFields.EncryptionIV, ByteUtils.randomBytes(16));
} else if (versionMajor < 5) {
} else if (version.major == KdbxVersion.V4.major) {
_setInnerHeaderField(
InnerHeaderFields.InnerRandomStreamKey, ByteUtils.randomBytes(64));
final kdfParameters = readKdfParameters;
@@ -203,7 +250,7 @@ class KdbxHeader {
HeaderFields.EncryptionIV, ByteUtils.randomBytes(ivLength));
} else {
throw KdbxUnsupportedException(
'We do not support Kdbx 3.x and 4.x right now. ($versionMajor.$versionMinor)');
'We only support Kdbx 3.x and 4.x right now. ($version)');
}
}
@@ -213,8 +260,8 @@ class KdbxHeader {
writer.writeUint32(Consts.FileMagic);
writer.writeUint32(Consts.Sig2Kdbx);
// write version
writer.writeUint16(versionMinor);
writer.writeUint16(versionMajor);
writer.writeUint16(version.minor);
writer.writeUint16(version.major);
for (final field
in HeaderFields.values.where((f) => f != HeaderFields.EndOfHeader)) {
_writeField(writer, field);
@@ -225,7 +272,7 @@ class KdbxHeader {
}
void writeInnerHeader(WriterHelper writer) {
assert(versionMajor >= 4);
assert(version >= KdbxVersion.V4);
_validateInner();
for (final field in InnerHeaderFields.values
.where((f) => f != InnerHeaderFields.EndOfHeader)) {
@@ -268,37 +315,37 @@ class KdbxHeader {
}
void _writeFieldSize(WriterHelper writer, int size) {
if (versionMajor >= 4) {
if (version >= KdbxVersion.V4) {
writer.writeUint32(size);
} else {
writer.writeUint16(size);
}
}
static Map<HeaderFields, HeaderField> _defaultFieldValues() =>
Map.fromEntries([
HeaderField(HeaderFields.CipherID,
CryptoConsts.CIPHER_IDS[Cipher.aes].toBytes()),
HeaderField(
HeaderFields.CompressionFlags, WriterHelper.singleUint32Bytes(1)),
HeaderField(
HeaderFields.TransformRounds, WriterHelper.singleUint64Bytes(6000)),
HeaderField(
HeaderFields.InnerRandomStreamID,
WriterHelper.singleUint32Bytes(ProtectedValueEncryption.values
.indexOf(ProtectedValueEncryption.salsa20))),
].map((f) => MapEntry(f.field, f)));
static Map<HeaderFields, HeaderField> _defaultFieldValues() => _headerFields({
HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes].toBytes(),
HeaderFields.CompressionFlags:
WriterHelper.singleUint32Bytes(Compression.gzip.id),
HeaderFields.TransformRounds: WriterHelper.singleUint64Bytes(6000),
HeaderFields.InnerRandomStreamID: WriterHelper.singleUint32Bytes(
ProtectedValueEncryption.values
.indexOf(ProtectedValueEncryption.salsa20)),
});
static Map<HeaderFields, HeaderField> _defaultFieldValuesV4() =>
_defaultFieldValues()
..remove(HeaderFields.TransformRounds)
..remove(HeaderFields.InnerRandomStreamID)
..remove(HeaderFields.ProtectedStreamKey)
..also((fields) {
fields[HeaderFields.KdfParameters] = HeaderField(
HeaderFields.KdfParameters,
_createKdfDefaultParameters().write());
});
_headerFields({
HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes].toBytes(),
HeaderFields.CompressionFlags:
WriterHelper.singleUint32Bytes(Compression.gzip.id),
HeaderFields.KdfParameters: _createKdfDefaultParameters().write(),
HeaderFields.InnerRandomStreamID: WriterHelper.singleUint32Bytes(
ProtectedValueEncryption.values
.indexOf(ProtectedValueEncryption.chaCha20)),
});
static Map<HeaderFields, HeaderField> _headerFields(
Map<HeaderFields, Uint8List> headerFields) =>
headerFields.map((key, value) => MapEntry(key, HeaderField(key, value)));
static Map<InnerHeaderFields, InnerHeaderField>
_defaultInnerFieldValuesV4() => Map.fromEntries([
@@ -321,35 +368,32 @@ class KdbxHeader {
// reading version
final versionMinor = reader.readUint16();
final versionMajor = reader.readUint16();
final version = KdbxVersion._(versionMajor, versionMinor);
_logger.finer('Reading version: $versionMajor.$versionMinor');
final headerFields = readAllFields(
reader,
versionMajor,
HeaderFields.values,
_logger.finer('Reading version: $version');
final headerFields = readAllFields(reader, version, HeaderFields.values,
(HeaderFields field, value) => HeaderField(field, value));
return KdbxHeader(
sig1: sig1,
sig2: sig2,
versionMinor: versionMinor,
versionMajor: versionMajor,
version: version,
fields: headerFields,
endPos: reader.pos,
);
}
static Map<HeaderFields, HeaderField> readHeaderFields(
ReaderHelper reader, int versionMajor) =>
readAllFields(reader, versionMajor, HeaderFields.values,
ReaderHelper reader, KdbxVersion version) =>
readAllFields(reader, version, HeaderFields.values,
(HeaderFields field, value) => HeaderField(field, value));
static InnerHeader readInnerHeaderFields(
ReaderHelper reader, int versionMajor) =>
ReaderHelper reader, KdbxVersion version) =>
InnerHeader.fromFields(
readField(
reader,
versionMajor,
version,
InnerHeaderFields.values,
(InnerHeaderFields field, value) =>
InnerHeaderField(field, value)).toList(growable: false),
@@ -357,22 +401,21 @@ class KdbxHeader {
static Map<TE, T> readAllFields<T extends HeaderFieldBase<TE>, TE>(
ReaderHelper reader,
int versionMajor,
KdbxVersion version,
List<TE> fields,
T Function(TE field, Uint8List bytes) createField) =>
Map<TE, T>.fromEntries(
readField(reader, versionMajor, fields, createField)
.map((field) => MapEntry(field.field, field)));
Map<TE, T>.fromEntries(readField(reader, version, fields, createField)
.map((field) => MapEntry(field.field, field)));
static Iterable<T> readField<T, TE>(
ReaderHelper reader,
int versionMajor,
KdbxVersion version,
List<TE> fields,
T Function(TE field, Uint8List bytes) createField) sync* {
while (true) {
final headerId = reader.readUint8();
final bodySize =
versionMajor >= 4 ? reader.readUint32() : reader.readUint16();
version >= KdbxVersion.V4 ? reader.readUint32() : reader.readUint16();
final bodyBytes = bodySize > 0 ? reader.readBytes(bodySize) : null;
// _logger.finer(
// 'Read header ${fields[headerId]}: ${ByteUtils.toHexList(bodyBytes)}');
@@ -396,8 +439,10 @@ class KdbxHeader {
final int sig1;
final int sig2;
final int versionMinor;
final int versionMajor;
KdbxVersion _version;
KdbxVersion get version => _version;
// int get versionMinor => _versionMinor;
// int get versionMajor => _versionMajor;
final Map<HeaderFields, HeaderField> fields;
final InnerHeader innerHeader;
@@ -405,26 +450,21 @@ class KdbxHeader {
final int endPos;
Compression get compression {
switch (ReaderHelper.singleUint32(
fields[HeaderFields.CompressionFlags].bytes)) {
case 0:
return Compression.none;
case 1:
return Compression.gzip;
default:
throw KdbxUnsupportedException('compression');
}
final id =
ReaderHelper.singleUint32(fields[HeaderFields.CompressionFlags].bytes);
return _compressionIdsById[id] ??
(() => throw KdbxUnsupportedException('invalid compression $id'))();
}
ProtectedValueEncryption get innerRandomStreamEncryption =>
ProtectedValueEncryption
.values[ReaderHelper.singleUint32(_innerRandomStreamEncryptionBytes)];
Uint8List get _innerRandomStreamEncryptionBytes => versionMajor >= 4
Uint8List get _innerRandomStreamEncryptionBytes => version >= KdbxVersion.V4
? innerHeader.fields[InnerHeaderFields.InnerRandomStreamID].bytes
: fields[HeaderFields.InnerRandomStreamID].bytes;
Uint8List get protectedStreamKey => versionMajor >= 4
Uint8List get protectedStreamKey => version >= KdbxVersion.V4
? innerHeader.fields[InnerHeaderFields.InnerRandomStreamKey].bytes
: fields[HeaderFields.ProtectedStreamKey].bytes;
@@ -434,9 +474,24 @@ class KdbxHeader {
void writeKdfParameters(VarDictionary kdfParameters) =>
_setHeaderField(HeaderFields.KdfParameters, kdfParameters.write());
void upgrade(int majorVersion) {
checkArgument(majorVersion == KdbxVersion.V4.major,
message: 'Can only upgrade to 4');
_logger.info('Upgrading header to $majorVersion');
_version = KdbxVersion._(majorVersion, 0);
if (fields[HeaderFields.KdfParameters] == null) {
_logger.fine('Creating kdf parameters.');
writeKdfParameters(_createKdfDefaultParameters());
}
_setHeaderField(
HeaderFields.InnerRandomStreamID,
WriterHelper.singleUint32Bytes(ProtectedValueEncryption.values
.indexOf(ProtectedValueEncryption.chaCha20)));
}
@override
String toString() {
return 'KdbxHeader{sig1: $sig1, sig2: $sig2, versionMajor: $versionMajor, versionMinor: $versionMinor}';
return 'KdbxHeader{sig1: $sig1, sig2: $sig2, version: $version}';
}
}
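Most of the churn in this file is mechanical: raw `versionMajor`/`versionMinor` checks become comparisons on the new `KdbxVersion` value type. A small sketch of its behaviour, using only what is defined above:

```dart
// Sketch only: illustrates the comparison operators added in this commit.
void kdbxVersionExamples() {
  const v = KdbxVersion.V3_1;
  assert(v < KdbxVersion.V4);              // 3.1 sorts before 4.0
  assert(v >= KdbxVersion.V3);             // 3.1 is at least 3.0
  assert(v.major == KdbxVersion.V3.major); // major-only checks still possible
  assert('$v' == '3.1');                   // toString() yields "major.minor"
}
```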

lib/src/kdbx_meta.dart (4 changed lines)

@@ -25,6 +25,7 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
super.create('Meta') {
this.databaseName.set(databaseName);
this.generator.set(generator ?? 'kdbx.dart');
settingsChanged.setToNow();
}
KdbxMeta.read(xml.XmlElement node, this.ctx)
@@ -90,6 +91,9 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
UuidNode get recycleBinUUID => UuidNode(this, 'RecycleBinUUID');
DateTimeUtcNode get settingsChanged =>
DateTimeUtcNode(this, 'SettingsChanged');
DateTimeUtcNode get recycleBinChanged =>
DateTimeUtcNode(this, 'RecycleBinChanged');
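The Meta side is small: newly created files now stamp `SettingsChanged`, and `RecycleBinChanged` gets a typed accessor. A sketch of reading these back, assuming the usual `DateTimeUtcNode` get/set API used elsewhere in the library (pre-null-safety Dart, matching this commit):

```dart
import 'package:kdbx/kdbx.dart';

// Sketch only; `file` is an already created or loaded database.
void inspectMetaTimes(KdbxFile file) {
  final meta = file.body.meta;
  final DateTime settings = meta.settingsChanged.get();     // set in KdbxMeta.create
  final DateTime recycleBin = meta.recycleBinChanged.get(); // null if never set
  print('settings changed: $settings, recycle bin changed: $recycleBin');
}
```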

lib/src/kdbx_xml.dart (6 changed lines)

@@ -56,6 +56,12 @@ abstract class KdbxSubNode<T> {
T get();
void set(T value);
void remove() {
for (final el in node.node.findElements(name)) {
el.parentElement.children.remove(el);
}
}
}
abstract class KdbxSubTextNode<T> extends KdbxSubNode<T> {
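The new `remove()` on `KdbxSubNode` is what the upgrade path relies on: it deletes every matching element under the node. A sketch (not part of the commit) of dropping the v3-only `Meta/HeaderHash` the same way `KdbxFile.upgrade()` does:

```dart
import 'package:kdbx/kdbx.dart';

// Sketch only: removes all <HeaderHash> elements under <Meta>.
void dropHeaderHash(KdbxFile file) {
  file.body.meta.headerHash.remove();
}
```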

test/kdbx_test.dart (4 changed lines)

@@ -83,7 +83,7 @@ void main() {
test('read mod date time', () async {
final file = await TestUtil.readKdbxFile('test/keepass2test.kdbx');
final first = file.body.rootGroup.entries.first;
expect(file.header.versionMajor, 3);
expect(file.header.version.major, 3);
expect(first.getString(KdbxKey('Title')).getText(), 'Sample Entry');
final modTime = first.times.lastModificationTime.get();
expect(modTime, DateTime.utc(2020, 5, 6, 7, 31, 48));
@@ -93,7 +93,7 @@ void main() {
final file = await TestUtil.readKdbxFile('test/keepass2test.kdbx');
{
final first = file.body.rootGroup.entries.first;
expect(file.header.versionMajor, 3);
expect(file.header.version.major, 3);
expect(first.getString(KdbxKey('Title')).getText(), 'Sample Entry');
first.times.lastModificationTime.set(newModDate);
}
