
migrate to nnbd

null-safety
Herbert Poul 4 years ago
parent commit a5c4d6a0e7
  1. bin/kdbx.dart (14 lines changed)
  2. lib/src/crypto/key_encrypter_kdf.dart (28 lines changed)
  3. lib/src/crypto/protected_salt_generator.dart (4 lines changed)
  4. lib/src/crypto/protected_value.dart (8 lines changed)
  5. lib/src/internal/async_utils.dart (2 lines changed)
  6. lib/src/internal/consts.dart (2 lines changed)
  7. lib/src/internal/crypto_utils.dart (2 lines changed)
  8. lib/src/internal/extension_utils.dart (19 lines changed)
  9. lib/src/kdbx_binary.dart (26 lines changed)
  10. lib/src/kdbx_custom_data.dart (2 lines changed)
  11. lib/src/kdbx_dao.dart (14 lines changed)
  12. lib/src/kdbx_deleted_object.dart (4 lines changed)
  13. lib/src/kdbx_entry.dart (62 lines changed)
  14. lib/src/kdbx_file.dart (10 lines changed)
  15. lib/src/kdbx_format.dart (110 lines changed)
  16. lib/src/kdbx_group.dart (14 lines changed)
  17. lib/src/kdbx_header.dart (80 lines changed)
  18. lib/src/kdbx_meta.dart (32 lines changed)
  19. lib/src/kdbx_object.dart (33 lines changed)
  20. lib/src/kdbx_var_dictionary.dart (8 lines changed)
  21. lib/src/kdbx_xml.dart (49 lines changed)
  22. lib/src/utils/byte_utils.dart (38 lines changed)
  23. lib/src/utils/print_utils.dart (12 lines changed)
  24. pubspec.yaml (4 lines changed)
  25. test/deleted_objects_test.dart (2 lines changed)
  26. test/icon/kdbx_customicon_test.dart (2 lines changed)
  27. test/internal/test_utils.dart (2 lines changed)
  28. test/kdbx4_test.dart (4 lines changed)
  29. test/kdbx_binaries_test.dart (8 lines changed)
  30. test/kdbx_history_test.dart (20 lines changed)
  31. test/kdbx_test.dart (6 lines changed)
  32. test/merge/kdbx_merge_test.dart (2 lines changed)

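The per-file diffs below repeat a handful of standard Dart null-safety idioms: nullable types (`String?`), `required` named parameters instead of `@required` from package:meta, the null assertion operator `!`, and `firstWhereOrNull` from package:collection instead of `firstWhere(..., orElse: () => null)`. As a rough illustration only (the types and names here are made up, not taken from the kdbx code), the before/after shape of most changes looks like this:

```dart
import 'package:collection/collection.dart' show IterableExtension;

// Hypothetical example type; not part of the kdbx library.
class Account {
  Account({required this.name, this.note}); // `required` replaces `@required` from package:meta.

  final String name; // non-nullable: must always be set.
  final String? note; // nullable: `?` marks values that may be absent.
}

String describe(Account account) {
  // `??` handles the nullable field without a manual null check.
  return '${account.name}: ${account.note ?? '<no note>'}';
}

Account? findByName(List<Account> accounts, String name) =>
    // firstWhereOrNull (package:collection) replaces
    // firstWhere(..., orElse: () => null), which no longer compiles.
    accounts.firstWhereOrNull((a) => a.name == name);

void main() {
  final accounts = [Account(name: 'demo', note: 'hello')];
  print(describe(accounts.first));
  final missing = findByName(accounts, 'other');
  print(missing == null); // true
  // `!` asserts non-null and throws at runtime if the value is null,
  // the same pattern used throughout the migrated code below.
  print(findByName(accounts, 'demo')!.name);
}
```
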
bin/kdbx.dart (14 lines changed)

@@ -16,7 +16,7 @@ final _logger = Logger('kdbx');
 void main(List<String> arguments) {
 exitCode = 0;
 final runner = KdbxCommandRunner('kdbx', 'Kdbx Utility');
-runner.run(arguments).catchError((dynamic error, StackTrace stackTrace) {
+runner.run(arguments).catchError((Object error, StackTrace stackTrace) {
 if (error is! UsageException) {
 return Future<dynamic>.error(error, stackTrace);
 }
@@ -76,14 +76,14 @@ abstract class KdbxFileCommand extends Command<void> {
 @override
 FutureOr<void> run() async {
-final inputFile = argResults['input'] as String;
+final inputFile = argResults!['input'] as String?;
 if (inputFile == null) {
 usageException('Required argument: --input');
 }
 final bytes = await File(inputFile).readAsBytes();
-final password = argResults['password'] as String ??
+final password = argResults!['password'] as String? ??
 _readPassword('Password for $inputFile: ');
-final keyFile = argResults['keyfile'] as String;
+final keyFile = argResults!['keyfile'] as String?;
 final keyFileData =
 keyFile == null ? null : await File(keyFile).readAsBytes();
@@ -103,7 +103,7 @@ String _readPassword(String prompt) {
 stdin.echoMode = false;
 stdout.write(prompt);
 while (true) {
-final input = stdin.readLineSync();
+final input = stdin.readLineSync()!;
 if (input.isNotEmpty) {
 return input;
 }
@@ -127,9 +127,9 @@ class CatCommand extends KdbxFileCommand {
 @override
 String get name => 'cat';
-bool get forceDecrypt => argResults['decrypt'] as bool;
-bool get allFields => argResults['all-fields'] as bool;
+bool? get forceDecrypt => argResults!['decrypt'] as bool?;
+bool? get allFields => argResults!['all-fields'] as bool?;
 @override
 Future<void> runWithFile(KdbxFile file) async {

lib/src/crypto/key_encrypter_kdf.dart (28 lines changed)

@@ -49,7 +49,7 @@ class KdfField<T> {
 .fine('VarDictionary{\n${fields.map((f) => f.debug(dict)).join('\n')}');
 }
-T read(VarDictionary dict) => dict.get(type, field);
+T? read(VarDictionary dict) => dict.get(type, field);
 void write(VarDictionary dict, T value) => dict.set(type, field, value);
 VarDictionaryItem<T> item(T value) =>
 VarDictionaryItem<T>(field, type, value);
@@ -57,7 +57,7 @@ class KdfField<T> {
 String debug(VarDictionary dict) {
 final value = dict.get(type, field);
 final strValue = type == ValueType.typeBytes
-? ByteUtils.toHexList(value as Uint8List)
+? ByteUtils.toHexList(value as Uint8List?)
 : value;
 return '$field=$strValue';
 }
@@ -76,7 +76,7 @@ class KeyEncrypterKdf {
 return KdbxUuid(uuid);
 }
-static KdfType kdfTypeFor(VarDictionary kdfParameters) {
+static KdfType? kdfTypeFor(VarDictionary kdfParameters) {
 final uuid = KdfField.uuid.read(kdfParameters);
 if (uuid == null) {
 throw KdbxCorruptedFileException('No Kdf UUID');
@@ -110,20 +110,20 @@ class KeyEncrypterKdf {
 Uint8List key, VarDictionary kdfParameters) async {
 return await argon2.argon2Async(Argon2Arguments(
 key,
-KdfField.salt.read(kdfParameters),
+KdfField.salt.read(kdfParameters)!,
 // 65536, //KdfField.memory.read(kdfParameters),
-KdfField.memory.read(kdfParameters) ~/ 1024,
-KdfField.iterations.read(kdfParameters),
+KdfField.memory.read(kdfParameters)! ~/ 1024,
+KdfField.iterations.read(kdfParameters)!,
 32,
-KdfField.parallelism.read(kdfParameters),
+KdfField.parallelism.read(kdfParameters)!,
 0,
-KdfField.version.read(kdfParameters),
+KdfField.version.read(kdfParameters)!,
 ));
 }
 Future<Uint8List> encryptAes(
 Uint8List key, VarDictionary kdfParameters) async {
-final encryptionKey = KdfField.salt.read(kdfParameters);
+final encryptionKey = KdfField.salt.read(kdfParameters)!;
 final rounds = KdfField.rounds.read(kdfParameters);
 assert(encryptionKey.length == 32);
 return await encryptAesAsync(EncryptAesArgs(encryptionKey, key, rounds));
@@ -137,7 +137,7 @@ class KeyEncrypterKdf {
 final s = Stopwatch()..start();
 try {
 _logger.finest('Starting encryptAes for ${args.rounds} '
-'rounds in isolate. ${args.encryptionKey.length} ${args.key.length}');
+'rounds in isolate. ${args.encryptionKey!.length} ${args.key.length}');
 return await runner.run(_encryptAesSync, args);
 } finally {
 _logger.finest('Done aes encrypt. ${s.elapsed}');
@@ -147,11 +147,11 @@ class KeyEncrypterKdf {
 static Uint8List _encryptAesSync(EncryptAesArgs args) {
 final cipher = ECBBlockCipher(AESFastEngine())
-..init(true, KeyParameter(args.encryptionKey));
+..init(true, KeyParameter(args.encryptionKey!));
 var out1 = Uint8List.fromList(args.key);
 var out2 = Uint8List(args.key.length);
-final rounds = args.rounds;
+final rounds = args.rounds!;
 for (var i = 0; i < rounds; i++) {
 for (var j = 0; j < out1.lengthInBytes;) {
 j += cipher.processBlock(out1, j, out2, j);
@@ -167,7 +167,7 @@ class KeyEncrypterKdf {
 class EncryptAesArgs {
 EncryptAesArgs(this.encryptionKey, this.key, this.rounds);
-final Uint8List encryptionKey;
+final Uint8List? encryptionKey;
 final Uint8List key;
-final int rounds;
+final int? rounds;
 }

lib/src/crypto/protected_salt_generator.dart (4 lines changed)

@@ -24,7 +24,7 @@ class ProtectedSaltGenerator {
 Uint8List.fromList([0xE8, 0x30, 0x09, 0x4B, 0x97, 0x20, 0x5D, 0x2A]);
 final StreamCipher _cipher;
-String decryptBase64(String protectedValue) {
+String? decryptBase64(String protectedValue) {
 final bytes = base64.decode(protectedValue);
 if (bytes.isEmpty) {
 _logger.warning('decoded base64 data has length 0');
@@ -70,7 +70,7 @@ class ChachaProtectedSaltGenerator implements ProtectedSaltGenerator {
 StreamCipher get _cipher => throw UnimplementedError();
 @override
-String decryptBase64(String protectedValue) {
+String? decryptBase64(String protectedValue) {
 final bytes = base64.decode(protectedValue);
 if (bytes.isEmpty) {
 _logger.warning('decoded base64 data has length 0');

lib/src/crypto/protected_value.dart (8 lines changed)

@@ -6,16 +6,16 @@ import 'package:crypto/crypto.dart';
 abstract class StringValue {
 /// retrieves the (decrypted) stored value.
-String getText();
+String? getText();
 }
 class PlainValue implements StringValue {
 PlainValue(this.text);
-final String text;
+final String? text;
 @override
-String getText() {
+String? getText() {
 return text;
 }
@@ -78,7 +78,7 @@ class ProtectedValue implements StringValue {
 bool operator ==(dynamic other) =>
 other is ProtectedValue && other.getText() == getText();
-int _hashCodeCached;
+int? _hashCodeCached;
 @override
 int get hashCode => _hashCodeCached ??= getText().hashCode;

lib/src/internal/async_utils.dart (2 lines changed)

@@ -8,7 +8,7 @@ mixin StreamSubscriberBase {
 /// Listens to a stream and saves it to the list of subscriptions.
 void listen(Stream<dynamic> stream, void Function(dynamic data) onData,
-{Function onError}) {
+{Function? onError}) {
 if (stream != null) {
 _subscriptions.add(stream.listen(onData, onError: onError));
 }

lib/src/internal/consts.dart (2 lines changed)

@@ -18,6 +18,6 @@ class CryptoConsts {
 static final cipherByUuid =
 CIPHER_IDS.map((key, value) => MapEntry(value, key));
-static Cipher cipherFromBytes(Uint8List bytes) =>
+static Cipher? cipherFromBytes(Uint8List bytes) =>
 cipherByUuid[KdbxUuid.fromBytes(bytes)];
 }

lib/src/internal/crypto_utils.dart (2 lines changed)

@@ -15,7 +15,7 @@ class AesHelper {
 static Uint8List deriveKey(
 Uint8List password, {
-Uint8List salt,
+required Uint8List salt,
 int iterationCount = ITERATION_COUNT,
 int derivedKeyLength = KEY_SIZE,
 }) {

lib/src/internal/extension_utils.dart (19 lines changed)

@@ -2,8 +2,8 @@ import 'package:kdbx/src/kdbx_xml.dart';
 import 'package:xml/xml.dart' as xml;
 extension XmlElementExt on xml.XmlElement {
-xml.XmlElement singleElement(String nodeName,
-{xml.XmlElement Function() orElse}) {
+xml.XmlElement? singleElement(String nodeName,
+{xml.XmlElement Function()? orElse}) {
 final elements = findElements(nodeName);
 if (elements.isEmpty) {
 if (orElse != null) {
@@ -42,17 +42,18 @@ extension ObjectExt<T> on T {
 R let<R>(R Function(T that) op) => op(this);
 }
-extension StringExt on String {
-String takeUnlessBlank() => nullIfBlank();
-String nullIfBlank() {
-if (this == null || isEmpty) {
+extension StringExt on String? {
+String? takeUnlessBlank() => nullIfBlank();
+String? nullIfBlank() {
+final t = this;
+if (t == null || t.isEmpty) {
 return null;
 }
 return this;
 }
 }
-extension IterableExt<T> on Iterable<T> {
-T get singleOrNull => singleWhere((element) => true, orElse: () => null);
-}
+// extension IterableExt<T> on Iterable<T> {
+// T? get singleOrNull => singleWhereOrNull((element) => true);
+// }

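The `IterableExt` extension commented out above is superseded by `package:collection`, whose `IterableExtension` (the same import added to `lib/src/kdbx_entry.dart` and `lib/src/kdbx_format.dart` further down) already provides `singleOrNull`, `firstWhereOrNull` and `singleWhereOrNull`. A small hedged sketch of the replacement, independent of the kdbx code:

```dart
import 'package:collection/collection.dart';

void main() {
  final values = <int>[1, 2, 3];

  // Pre-null-safety code often wrote
  // values.singleWhere((e) => e.isEven, orElse: () => null),
  // which no longer compiles for a non-nullable element type.
  final single = values.singleWhereOrNull((e) => e.isEven);
  print(single); // 2

  // singleOrNull returns the only element, or null when the iterable
  // is empty or has more than one element.
  print(values.singleOrNull); // null (three elements)
  print([42].singleOrNull); // 42
}
```
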
lib/src/kdbx_binary.dart (26 lines changed)

@@ -11,15 +11,15 @@ import 'package:xml/xml.dart';
 class KdbxBinary {
 KdbxBinary({this.isInline, this.isProtected, this.value});
-final bool isInline;
-final bool isProtected;
-final Uint8List value;
-int _valueHashCode;
+final bool? isInline;
+final bool? isProtected;
+final Uint8List? value;
+int? _valueHashCode;
 static KdbxBinary readBinaryInnerHeader(InnerHeaderField field) {
-final flags = field.bytes[0];
+final flags = field.bytes![0];
 final isProtected = flags & 0x01 == 0x01;
-final value = Uint8List.sublistView(field.bytes, 1);
+final value = Uint8List.sublistView(field.bytes!, 1);
 return KdbxBinary(
 isInline: false,
 isProtected: isProtected,
@@ -27,22 +27,22 @@ class KdbxBinary {
 );
 }
-int get valueHashCode => _valueHashCode ??= hashObjects(value);
+int get valueHashCode => _valueHashCode ??= hashObjects(value!);
 bool valueEqual(KdbxBinary other) =>
-valueHashCode == other.valueHashCode && ByteUtils.eq(value, value);
+valueHashCode == other.valueHashCode && ByteUtils.eq(value!, value!);
 InnerHeaderField writeToInnerHeader() {
 final writer = WriterHelper();
-final flags = isProtected ? 0x01 : 0x00;
+final flags = isProtected! ? 0x01 : 0x00;
 writer.writeUint8(flags);
-writer.writeBytes(value);
+writer.writeBytes(value!);
 return InnerHeaderField(
 InnerHeaderFields.Binary, writer.output.takeBytes());
 }
 static KdbxBinary readBinaryXml(XmlElement valueNode,
-{@required bool isInline}) {
+{required bool isInline}) {
 assert(isInline != null);
 final isProtected = valueNode.getAttributeBool(KdbxXml.ATTR_PROTECTED);
 final isCompressed = valueNode.getAttributeBool(KdbxXml.ATTR_COMPRESSED);
@@ -58,8 +58,8 @@ class KdbxBinary {
 }
 void saveToXml(XmlElement valueNode) {
-final content = base64.encode(gzip.encode(value));
-valueNode.addAttributeBool(KdbxXml.ATTR_PROTECTED, isProtected);
+final content = base64.encode(gzip.encode(value!));
+valueNode.addAttributeBool(KdbxXml.ATTR_PROTECTED, isProtected!);
 valueNode.addAttributeBool(KdbxXml.ATTR_COMPRESSED, true);
 valueNode.children.add(XmlText(content));
 }

lib/src/kdbx_custom_data.dart (2 lines changed)

@@ -23,7 +23,7 @@ class KdbxCustomData extends KdbxNode {
 Iterable<MapEntry<String, String>> get entries => _data.entries;
-String operator [](String key) => _data[key];
+String? operator [](String key) => _data[key];
 void operator []=(String key, String value) {
 modify(() => _data[key] = value);
 }

lib/src/kdbx_dao.dart (14 lines changed)

@@ -8,8 +8,8 @@ import 'package:meta/meta.dart';
 /// a kdbx file.
 extension KdbxDao on KdbxFile {
 KdbxGroup createGroup({
-@required KdbxGroup parent,
-@required String name,
+required KdbxGroup parent,
+required String name,
 }) {
 assert(parent != null, name != null);
 final newGroup = KdbxGroup.create(ctx: ctx, parent: parent, name: name);
@@ -17,10 +17,10 @@ extension KdbxDao on KdbxFile {
 return newGroup;
 }
-KdbxGroup findGroupByUuid(KdbxUuid uuid) =>
+KdbxGroup findGroupByUuid(KdbxUuid? uuid) =>
 body.rootGroup.getAllGroups().firstWhere((group) => group.uuid == uuid,
-orElse: () =>
-throw StateError('Unable to find group with uuid $uuid'));
+orElse: (() =>
+throw StateError('Unable to find group with uuid $uuid')) as KdbxGroup Function()?);
 void deleteGroup(KdbxGroup group) {
 move(group, getRecycleBinOrCreate());
@@ -34,11 +34,11 @@ extension KdbxDao on KdbxFile {
 assert(toGroup != null);
 kdbxObject.times.locationChanged.setToNow();
 if (kdbxObject is KdbxGroup) {
-kdbxObject.parent.internalRemoveGroup(kdbxObject);
+kdbxObject.parent!.internalRemoveGroup(kdbxObject);
 kdbxObject.internalChangeParent(toGroup);
 toGroup.addGroup(kdbxObject);
 } else if (kdbxObject is KdbxEntry) {
-kdbxObject.parent.internalRemoveEntry(kdbxObject);
+kdbxObject.parent!.internalRemoveEntry(kdbxObject);
 kdbxObject.internalChangeParent(toGroup);
 toGroup.addEntry(kdbxObject);
 }

lib/src/kdbx_deleted_object.dart (4 lines changed)

@@ -4,7 +4,7 @@ import 'package:kdbx/src/kdbx_xml.dart';
 import 'package:xml/xml.dart';
 class KdbxDeletedObject extends KdbxNode implements KdbxNodeContext {
-KdbxDeletedObject.create(this.ctx, KdbxUuid uuid) : super.create(NODE_NAME) {
+KdbxDeletedObject.create(this.ctx, KdbxUuid? uuid) : super.create(NODE_NAME) {
 _uuid.set(uuid);
 deletionTime.setToNow();
 }
@@ -16,7 +16,7 @@ class KdbxDeletedObject extends KdbxNode implements KdbxNodeContext {
 @override
 final KdbxReadWriteContext ctx;
-KdbxUuid get uuid => _uuid.get();
+KdbxUuid? get uuid => _uuid.get();
 UuidNode get _uuid => UuidNode(this, KdbxXml.NODE_UUID);
 DateTimeUtcNode get deletionTime => DateTimeUtcNode(this, 'DeletionTime');
 }

lib/src/kdbx_entry.dart (62 lines changed)

@@ -1,5 +1,6 @@
 import 'dart:typed_data';
+import 'package:collection/collection.dart' show IterableExtension;
 import 'package:kdbx/src/crypto/protected_value.dart';
 import 'package:kdbx/src/internal/extension_utils.dart';
 import 'package:kdbx/src/kdbx_binary.dart';
@@ -11,7 +12,6 @@ import 'package:kdbx/src/kdbx_header.dart';
 import 'package:kdbx/src/kdbx_object.dart';
 import 'package:kdbx/src/kdbx_xml.dart';
 import 'package:logging/logging.dart';
-import 'package:meta/meta.dart';
 import 'package:path/path.dart' as path;
 import 'package:quiver/check.dart';
 import 'package:xml/xml.dart';
@@ -76,7 +76,7 @@ class KdbxKey {
 extension KdbxEntryInternal on KdbxEntry {
 KdbxEntry cloneInto(KdbxGroup otherGroup, {bool toHistoryEntry = false}) =>
 KdbxEntry.create(
-otherGroup.file,
+otherGroup.file!,
 otherGroup,
 isHistoryEntry: toHistoryEntry,
 )
@@ -130,12 +130,12 @@ extension KdbxEntryInternal on KdbxEntry {
 times.overwriteFrom(other.times);
 if (includeHistory) {
 for (final historyEntry in other.history) {
-history.add(historyEntry.cloneInto(parent, toHistoryEntry: false));
+history.add(historyEntry.cloneInto(parent!, toHistoryEntry: false));
 }
 }
 }
-List<String> _diffMap(Map<Object, Object> a, Map<Object, Object> b) {
+List<String> _diffMap(Map<Object, Object?> a, Map<Object, Object?> b) {
 final keys = {...a.keys, ...b.keys};
 final ret = <String>[];
 for (final key in keys) {
@@ -161,7 +161,7 @@ class KdbxEntry extends KdbxObject {
 icon.set(KdbxIcon.Key);
 }
-KdbxEntry.read(KdbxReadWriteContext ctx, KdbxGroup parent, XmlElement node,
+KdbxEntry.read(KdbxReadWriteContext ctx, KdbxGroup? parent, XmlElement node,
 {this.isHistoryEntry = false})
 : history = [],
 super.read(ctx, parent, node) {
@@ -195,9 +195,9 @@ class KdbxEntry extends KdbxObject {
 .findElements(KdbxXml.NODE_HISTORY)
 .singleOrNull
 ?.findElements('Entry')
-?.map((entry) =>
+.map((entry) =>
 KdbxEntry.read(ctx, parent, entry, isHistoryEntry: true))
-?.toList() ??
+.toList() ??
 []);
 }
@@ -211,7 +211,7 @@ class KdbxEntry extends KdbxObject {
 StringNode get tags => StringNode(this, 'Tags');
 @override
-set file(KdbxFile file) {
+set file(KdbxFile? file) {
 super.file = file;
 // TODO this looks like some weird workaround, get rid of the
 // `file` reference.
@@ -243,9 +243,9 @@ class KdbxEntry extends KdbxObject {
 value.attributes.add(
 XmlAttribute(XmlName(KdbxXml.ATTR_PROTECTED), KdbxXml.VALUE_TRUE));
 KdbxFile.setProtectedValueForNode(
-value, stringEntry.value as ProtectedValue);
+value, stringEntry.value as ProtectedValue?);
 } else if (stringEntry.value is StringValue) {
-value.children.add(XmlText(stringEntry.value.getText()));
+value.children.add(XmlText(stringEntry.value!.getText()!));
 }
 return XmlElement(XmlName(KdbxXml.NODE_STRING))
 ..children.addAll([
@@ -258,7 +258,7 @@ class KdbxEntry extends KdbxObject {
 final key = binaryEntry.key;
 final binary = binaryEntry.value;
 final value = XmlElement(XmlName(KdbxXml.NODE_VALUE));
-if (binary.isInline) {
+if (binary.isInline!) {
 binary.saveToXml(value);
 } else {
 final binaryIndex = ctx.findBinaryId(binary);
@@ -279,23 +279,23 @@ class KdbxEntry extends KdbxObject {
 return el;
 }
-final Map<KdbxKey, StringValue> _strings = {};
+final Map<KdbxKey, StringValue?> _strings = {};
 final Map<KdbxKey, KdbxBinary> _binaries = {};
 Iterable<MapEntry<KdbxKey, KdbxBinary>> get binaryEntries =>
 _binaries.entries;
-KdbxBinary getBinary(KdbxKey key) => _binaries[key];
+KdbxBinary? getBinary(KdbxKey key) => _binaries[key];
 // Map<KdbxKey, StringValue> get strings => UnmodifiableMapView(_strings);
-Iterable<MapEntry<KdbxKey, StringValue>> get stringEntries =>
+Iterable<MapEntry<KdbxKey, StringValue?>> get stringEntries =>
 _strings.entries;
-StringValue getString(KdbxKey key) => _strings[key];
-void setString(KdbxKey key, StringValue value) {
+StringValue? getString(KdbxKey key) => _strings[key];
+void setString(KdbxKey key, StringValue? value) {
 assert(key != null);
 if (_strings[key] == value) {
 _logger.finest('Value did not change for $key');
@@ -318,7 +318,7 @@ class KdbxEntry extends KdbxObject {
 void removeString(KdbxKey key) => setString(key, null);
-String _plainValue(KdbxKey key) {
+String? _plainValue(KdbxKey key) {
 final value = _strings[key];
 if (value is PlainValue) {
 return value.getText();
@@ -326,17 +326,17 @@ class KdbxEntry extends KdbxObject {
 return value?.toString();
 }
-String get label =>
+String? get label =>
 _plainValue(KdbxKeyCommon.TITLE)?.takeUnlessBlank() ??
 _plainValue(KdbxKeyCommon.URL)?.takeUnlessBlank();
-set label(String label) => setString(KdbxKeyCommon.TITLE, PlainValue(label));
+set label(String? label) => setString(KdbxKeyCommon.TITLE, PlainValue(label));
 /// Creates a new binary and adds it to this entry.
 KdbxBinary createBinary({
-@required bool isProtected,
-@required String name,
-@required Uint8List bytes,
+required bool isProtected,
+required String name,
+required Uint8List bytes,
 }) {
 assert(isProtected != null);
 assert(bytes != null);
@@ -349,7 +349,7 @@ class KdbxEntry extends KdbxObject {
 value: bytes,
 );
 modify(() {
-file.ctx.addBinary(binary);
+file!.ctx.addBinary(binary);
 _binaries[key] = binary;
 });
 return binary;
@@ -381,12 +381,10 @@ class KdbxEntry extends KdbxObject {
 throw StateError('Unable to find unique name for $fileName');
 }
-static KdbxEntry _findHistoryEntry(
-List<KdbxEntry> history, DateTime lastModificationTime) =>
-history.firstWhere(
-(history) =>
-history.times.lastModificationTime.get() == lastModificationTime,
-orElse: () => null);
+static KdbxEntry? _findHistoryEntry(
+List<KdbxEntry> history, DateTime? lastModificationTime) =>
+history.firstWhereOrNull((history) =>
+history.times.lastModificationTime.get() == lastModificationTime);
 @override
 void merge(MergeContext mergeContext, KdbxEntry other) {
@@ -403,7 +401,7 @@ class KdbxEntry extends KdbxObject {
 if (historyEntry == null) {
 // it seems like we don't know about that state, so we have to add
 // it to history.
-history.add(other.cloneInto(parent, toHistoryEntry: true));
+history.add(other.cloneInto(parent!, toHistoryEntry: true));
 }
 } else {
 _logger.finest('$this has no changes.');
@@ -418,13 +416,13 @@ class KdbxEntry extends KdbxObject {
 debug: 'merge in history '
 '${otherHistoryEntry.times.lastModificationTime.get()}',
 );
-history.add(otherHistoryEntry.cloneInto(parent, toHistoryEntry: true));
+history.add(otherHistoryEntry.cloneInto(parent!, toHistoryEntry: true));
 }
 }
 mergeContext.markAsMerged(this);
 }
-String debugLabel() => label ?? _plainValue(KdbxKeyCommon.USER_NAME);
+String? debugLabel() => label ?? _plainValue(KdbxKeyCommon.USER_NAME);
 @override
 String toString() {
10
lib/src/kdbx_file.dart

@ -26,12 +26,12 @@ class KdbxFile {
static final protectedValues = Expando<ProtectedValue>(); static final protectedValues = Expando<ProtectedValue>();
static ProtectedValue protectedValueForNode(xml.XmlElement node) { static ProtectedValue? protectedValueForNode(xml.XmlElement node) {
return protectedValues[node]; return protectedValues[node];
} }
static void setProtectedValueForNode( static void setProtectedValueForNode(
xml.XmlElement node, ProtectedValue value) { xml.XmlElement node, ProtectedValue? value) {
protectedValues[node] = value; protectedValues[node] = value;
} }
@ -77,10 +77,10 @@ class KdbxFile {
_dirtyObjectsChanged.close(); _dirtyObjectsChanged.close();
} }
CachedValue<KdbxGroup> _recycleBin; CachedValue<KdbxGroup>? _recycleBin;
/// Returns the recycle bin, if it exists, null otherwise. /// Returns the recycle bin, if it exists, null otherwise.
KdbxGroup get recycleBin => (_recycleBin ??= _findRecycleBin()).value; KdbxGroup? get recycleBin => (_recycleBin ??= _findRecycleBin()).value;
CachedValue<KdbxGroup> _findRecycleBin() { CachedValue<KdbxGroup> _findRecycleBin() {
final uuid = body.meta.recycleBinUUID.get(); final uuid = body.meta.recycleBinUUID.get();
@ -146,5 +146,5 @@ class CachedValue<T> {
CachedValue.withNull() : value = null; CachedValue.withNull() : value = null;
CachedValue.withValue(this.value) : assert(value != null); CachedValue.withValue(this.value) : assert(value != null);
final T value; final T? value;
} }

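The `CachedValue<KdbxGroup>?` change above is the usual way to cache a lookup whose result may itself be null: `??=` alone cannot tell "not computed yet" apart from "computed, and the answer is null", so the nullable result is wrapped. A simplified, hedged sketch of the pattern (`CachedValue` mirrors the class shown in the diff; the surrounding lookup class is made up):

```dart
// Simplified stand-in for the library's CachedValue<T>.
class CachedValue<T> {
  CachedValue.withNull() : value = null;
  CachedValue.withValue(this.value) : assert(value != null);

  final T? value;
}

class RecycleBinLookup {
  int _lookups = 0;

  CachedValue<String>? _recycleBin;

  // `??=` only runs the lookup once, even when the cached answer is null.
  String? get recycleBin => (_recycleBin ??= _find()).value;

  CachedValue<String> _find() {
    _lookups++;
    // Pretend the group was not found.
    return CachedValue.withNull();
  }
}

void main() {
  final lookup = RecycleBinLookup();
  print(lookup.recycleBin); // null
  print(lookup.recycleBin); // null again, but...
  print(lookup._lookups); // ...the lookup ran only once: 1
}
```
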
lib/src/kdbx_format.dart (110 lines changed)

@@ -5,6 +5,7 @@ import 'dart:typed_data';
 import 'package:archive/archive.dart';
 import 'package:argon2_ffi_base/argon2_ffi_base.dart';
+import 'package:collection/collection.dart' show IterableExtension;
 import 'package:convert/convert.dart' as convert;
 import 'package:crypto/crypto.dart' as crypto;
 import 'package:kdbx/kdbx.dart';
@@ -36,7 +37,7 @@ final _logger = Logger('kdbx.format');
 abstract class Credentials {
 factory Credentials(ProtectedValue password) =>
 Credentials.composite(password, null); //PasswordCredentials(password);
-factory Credentials.composite(ProtectedValue password, Uint8List keyFile) =>
+factory Credentials.composite(ProtectedValue password, Uint8List? keyFile) =>
 KeyFileComposite(
 password: password == null ? null : PasswordCredentials(password),
 keyFile: keyFile == null ? null : KeyFileCredentials(keyFile),
@@ -48,10 +49,10 @@ abstract class Credentials {
 }
 class KeyFileComposite implements Credentials {
-KeyFileComposite({@required this.password, @required this.keyFile});
-PasswordCredentials password;
-KeyFileCredentials keyFile;
+KeyFileComposite({required this.password, required this.keyFile});
+PasswordCredentials? password;
+KeyFileCredentials? keyFile;
 @override
 Uint8List getHash() {
@@ -70,8 +71,8 @@ class KeyFileComposite implements Credentials {
 /// Context used during reading and writing.
 class KdbxReadWriteContext {
 KdbxReadWriteContext({
-@required List<KdbxBinary> binaries,
-@required this.header,
+required List<KdbxBinary> binaries,
+required this.header,
 }) : assert(binaries != null),
 assert(header != null),
 _binaries = binaries,
@@ -80,7 +81,7 @@ class KdbxReadWriteContext {
 static final kdbxContext = Expando<KdbxReadWriteContext>();
 static KdbxReadWriteContext kdbxContextForNode(xml.XmlNode node) {
-final ret = kdbxContext[node.document];
+final ret = kdbxContext[node.document!];
 if (ret == null) {
 throw StateError('Unable to locate kdbx context for document.');
 }
@@ -89,7 +90,7 @@ class KdbxReadWriteContext {
 static void setKdbxContextForNode(
 xml.XmlNode node, KdbxReadWriteContext ctx) {
-kdbxContext[node.document] = ctx;
+kdbxContext[node.document!] = ctx;
 }
 // TODO make [_binaries] and [_deletedObjects] late init :-)
@@ -109,7 +110,7 @@ class KdbxReadWriteContext {
 _deletedObjects.addAll(deletedObjects);
 }
-KdbxBinary binaryById(int id) {
+KdbxBinary? binaryById(int id) {
 if (id >= _binaries.length) {
 return null;
 }
@@ -120,21 +121,20 @@ class KdbxReadWriteContext {
 _binaries.add(binary);
 }
-KdbxBinary findBinaryByValue(KdbxBinary binary) {
+KdbxBinary? findBinaryByValue(KdbxBinary binary) {
 // TODO create a hashset or map?
-return _binaries.firstWhere((element) => element.valueEqual(binary),
-orElse: () => null);
+return _binaries.firstWhereOrNull((element) => element.valueEqual(binary));
 }
 /// finds the ID of the given binary.
 /// if it can't be found, [KdbxCorruptedFileException] is thrown.
 int findBinaryId(KdbxBinary binary) {
 assert(binary != null);
-assert(!binary.isInline);
+assert(!binary.isInline!);
 final id = _binaries.indexOf(binary);
 if (id < 0) {
 throw KdbxCorruptedFileException('Unable to find binary.'
-' (${binary.value.length},${binary.isInline})');
+' (${binary.value!.length},${binary.isInline})');
 }
 return id;
 }
@@ -244,7 +244,7 @@ class KdbxBody extends KdbxNode {
 final xmlBytes = utf8.encode(xml.toXmlString());
 final compressedBytes = (kdbxFile.header.compression == Compression.gzip
 ? KdbxFormat._gzipEncode(xmlBytes as Uint8List)
-: xmlBytes) as Uint8List;
+: xmlBytes) as Uint8List?;
 final encrypted = await _encryptV3(kdbxFile, compressedBytes);
 writer.writeBytes(encrypted);
@@ -271,34 +271,34 @@ class KdbxBody extends KdbxNode {
 }
 Future<Uint8List> _encryptV3(
-KdbxFile kdbxFile, Uint8List compressedBytes) async {
+KdbxFile kdbxFile, Uint8List? compressedBytes) async {
 final byteWriter = WriterHelper();
 byteWriter.writeBytes(
-kdbxFile.header.fields[HeaderFields.StreamStartBytes].bytes);
+kdbxFile.header.fields[HeaderFields.StreamStartBytes]!.bytes!);
 HashedBlockReader.writeBlocks(ReaderHelper(compressedBytes), byteWriter);
 final bytes = byteWriter.output.toBytes();
 final masterKey = await KdbxFormat._generateMasterKeyV3(
 kdbxFile.header, kdbxFile.credentials);
 final encrypted = KdbxFormat._encryptDataAes(masterKey, bytes,
-kdbxFile.header.fields[HeaderFields.EncryptionIV].bytes);
+kdbxFile.header.fields[HeaderFields.EncryptionIV]!.bytes!);
 return encrypted;
 }
 Uint8List _encryptV4(
-KdbxFile kdbxFile, Uint8List compressedBytes, Uint8List cipherKey) {
+KdbxFile kdbxFile, Uint8List? compressedBytes, Uint8List cipherKey) {
 final header = kdbxFile.header;
 final cipher = header.cipher;
 if (cipher == Cipher.aes) {
 _logger.fine('We need AES');
 final result = kdbxFile.kdbxFormat
-._encryptContentV4Aes(header, cipherKey, compressedBytes);
+._encryptContentV4Aes(header, cipherKey, compressedBytes!);
 // _logger.fine('Result: ${ByteUtils.toHexList(result)}');
 return result;
 } else if (cipher == Cipher.chaCha20) {
 _logger.fine('We need chacha20');
 return kdbxFile.kdbxFormat
-.transformContentV4ChaCha20(header, compressedBytes, cipherKey);
+.transformContentV4ChaCha20(header, compressedBytes!, cipherKey);
 } else {
 throw UnsupportedError('Unsupported cipherId $cipher');
 }
@@ -314,7 +314,7 @@ class KdbxBody extends KdbxNode {
 // sync deleted objects.
 final deleted =
 Map.fromEntries(ctx._deletedObjects.map((e) => MapEntry(e.uuid, e)));
-final incomingDeleted = <KdbxUuid, KdbxDeletedObject>{};
+final incomingDeleted = <KdbxUuid?, KdbxDeletedObject>{};
 for (final obj in other.ctx._deletedObjects) {
 if (!deleted.containsKey(obj.uuid)) {
@@ -335,7 +335,7 @@ class KdbxBody extends KdbxNode {
 if (ctx.findBinaryByValue(binary) == null) {
 ctx.addBinary(binary);
 mergeContext.trackChange(this,
-debug: 'adding new binary ${binary.value.length}');
+debug: 'adding new binary ${binary.value!.length}');
 }
 }
 meta.merge(other.meta);
@@ -343,7 +343,7 @@ class KdbxBody extends KdbxNode {
 // remove deleted objects
 for (final incomingDelete in incomingDeleted.values) {
-final object = mergeContext.objectIndex[incomingDelete.uuid];
+final object = mergeContext.objectIndex![incomingDelete.uuid!];
 mergeContext.trackChange(object, debug: 'was deleted.');
 }
@@ -352,11 +352,11 @@ class KdbxBody extends KdbxNode {
 _logger.info('Finished merging:\n${mergeContext.debugChanges()}');
 final incomingObjects = other._createObjectIndex();
 _logger.info('Merged: ${mergeContext.merged} vs. '
-'(local objects: ${mergeContext.objectIndex.length}, '
+'(local objects: ${mergeContext.objectIndex!.length}, '
 'incoming objects: ${incomingObjects.length})');
 // sanity checks
-if (mergeContext.merged.keys.length != mergeContext.objectIndex.length) {
+if (mergeContext.merged.keys.length != mergeContext.objectIndex!.length) {
 // TODO figure out what went wrong.
 }
 return mergeContext;
@@ -411,23 +411,23 @@ class KdbxBody extends KdbxNode {
 abstract class OverwriteContext {
 const OverwriteContext();
 static const noop = OverwriteContextNoop();
-void trackChange(KdbxObject object, {String node, String debug});
+void trackChange(KdbxObject object, {String? node, String? debug});
 }
 class OverwriteContextNoop implements OverwriteContext {
 const OverwriteContextNoop();
 @override
-void trackChange(KdbxObject object, {String node, String debug}) {}
+void trackChange(KdbxObject object, {String? node, String? debug}) {}
 }
 class MergeChange {
 MergeChange({this.object, this.node, this.debug});
-final KdbxNode object;
+final KdbxNode? object;
 /// the name of the subnode of [object].
-final String node;
-final String debug;
+final String? node;
+final String? debug;
 String debugString() {
 return [node, debug].where((e) => e != null).join(' ');
@@ -436,8 +436,8 @@ class MergeChange {
 class MergeContext implements OverwriteContext {
 MergeContext({this.objectIndex, this.deletedObjects});
-final Map<KdbxUuid, KdbxObject> objectIndex;
-final Map<KdbxUuid, KdbxDeletedObject> deletedObjects;
+final Map<KdbxUuid, KdbxObject>? objectIndex;
+final Map<KdbxUuid?, KdbxDeletedObject>? deletedObjects;
 final Map<KdbxUuid, KdbxObject> merged = {};
 final List<MergeChange> changes = [];
@@ -450,7 +450,7 @@ class MergeContext implements OverwriteContext {
 }
 @override
-void trackChange(KdbxNode object, {String node, String debug}) {
+void trackChange(KdbxNode? object, {String? node, String? debug}) {
 changes.add(MergeChange(
 object: object,
 node: node,
@@ -480,7 +480,7 @@ class _KeysV4 {
 class KdbxFormat {
 KdbxFormat([this.argon2]) : assert(kdbxKeyCommonAssertConsistency());
-final Argon2 argon2;
+final Argon2? argon2;
 static bool dartWebWorkaround = false;
 /// Creates a new, empty [KdbxFile] with default settings.
@@ -488,8 +488,8 @@ class KdbxFormat {
 KdbxFile create(
 Credentials credentials,
 String name, {
-String generator,
-KdbxHeader header,
+String? generator,
+KdbxHeader? header,
 }) {
 header ??= argon2 == null ? KdbxHeader.createV3() : KdbxHeader.createV4();
 final ctx = KdbxReadWriteContext(binaries: [], header: header);
@@ -546,7 +546,7 @@ class KdbxFormat {
 throw UnsupportedError('Unsupported version ${header.version}');
 } else if (file.header.version < KdbxVersion.V4) {
 final streamKey =
-file.header.fields[HeaderFields.ProtectedStreamKey].bytes;
+file.header.fields[HeaderFields.ProtectedStreamKey]!.bytes!;
 final gen = ProtectedSaltGenerator(streamKey);
 body.meta.headerHash.set(headerHash.buffer);
@@ -712,7 +712,7 @@ class KdbxFormat {
 Uint8List transformContentV4ChaCha20(
 KdbxHeader header, Uint8List encrypted, Uint8List cipherKey) {
-final encryptionIv = header.fields[HeaderFields.EncryptionIV].bytes;
+final encryptionIv = header.fields[HeaderFields.EncryptionIV]!.bytes!;
 final chaCha = ChaCha7539Engine()
 ..init(true, ParametersWithIV(KeyParameter(cipherKey), encryptionIv));
 return chaCha.process(encrypted);
@@ -735,7 +735,7 @@ class KdbxFormat {
 Future<_KeysV4> _computeKeysV4(
 KdbxHeader header, Credentials credentials) async {
-final masterSeed = header.fields[HeaderFields.MasterSeed].bytes;
+final masterSeed = header.fields[HeaderFields.MasterSeed]!.bytes!;
 final kdfParameters = header.readKdfParameters;
 if (masterSeed.length != 32) {
 throw const FormatException('Master seed must be 32 bytes.');
@@ -743,7 +743,7 @@ class KdbxFormat {
 final credentialHash = credentials.getHash();
 final key =
-await KeyEncrypterKdf(argon2).encrypt(credentialHash, kdfParameters);
+await KeyEncrypterKdf(argon2!).encrypt(credentialHash, kdfParameters);
 // final keyWithSeed = Uint8List(65);
 // keyWithSeed.replaceRange(0, masterSeed.length, masterSeed);
@@ -762,9 +762,9 @@ class KdbxFormat {
 final protectedValueEncryption = header.innerRandomStreamEncryption;
 final streamKey = header.protectedStreamKey;
 if (protectedValueEncryption == ProtectedValueEncryption.salsa20) {
-return ProtectedSaltGenerator(streamKey);
+return ProtectedSaltGenerator(streamKey!);
 } else if (protectedValueEncryption == ProtectedValueEncryption.chaCha20) {
-return ProtectedSaltGenerator.chacha20(streamKey);
+return ProtectedSaltGenerator.chacha20(streamKey!);
 } else {
 throw KdbxUnsupportedException(
 'Inner encryption: $protectedValueEncryption');
@@ -789,7 +789,7 @@ class KdbxFormat {
 KdbxFile.protectedValues[el] = ProtectedValue.fromString(pw);
 } catch (e, stackTrace) {
 final stringKey =
-el.parentElement.singleElement(KdbxXml.NODE_KEY)?.text;
+el.parentElement!.singleElement(KdbxXml.NODE_KEY)?.text;
 final uuid = el.parentElement?.parentElement
 ?.singleElement(KdbxXml.NODE_UUID)
 ?.text;
@@ -811,14 +811,14 @@ class KdbxFormat {
 final kdbxMeta = KdbxMeta.read(meta, ctx);
 // kdbx < 4 has binaries in the meta section, >= 4 in the binary header.
 final binaries = kdbxMeta.binaries?.isNotEmpty == true
-? kdbxMeta.binaries
+? kdbxMeta.binaries!
 : header.innerHeader.binaries
 .map((e) => KdbxBinary.readBinaryInnerHeader(e));
 final deletedObjects = root
 .findElements(KdbxXml.NODE_DELETED_OBJECTS)
 .singleOrNull
-?.let((el) => el
+?.let((el) => el!
 .findElements(KdbxDeletedObject.NODE_NAME)
 .map((node) => KdbxDeletedObject.read(node, ctx))) ??
 [];
@@ -832,14 +832,14 @@ class KdbxFormat {
 Uint8List _decryptContent(
 KdbxHeader header, Uint8List masterKey, Uint8List encryptedPayload) {
-final encryptionIv = header.fields[HeaderFields.EncryptionIV].bytes;
+final encryptionIv = header.fields[HeaderFields.EncryptionIV]!.bytes!;
 final decryptCipher = CBCBlockCipher(AESFastEngine());
 decryptCipher.init(
 false, ParametersWithIV(KeyParameter(masterKey), encryptionIv));
 final paddedDecrypted =
 AesHelper.processBlocks(decryptCipher, encryptedPayload);
-final streamStart = header.fields[HeaderFields.StreamStartBytes].bytes;
+final streamStart = header.fields[HeaderFields.StreamStartBytes]!.bytes!;
 if (paddedDecrypted.lengthInBytes < streamStart.lengthInBytes) {
 _logger.warning(
@@ -861,7 +861,7 @@ class KdbxFormat {
 Uint8List _decryptContentV4(
 KdbxHeader header, Uint8List cipherKey, Uint8List encryptedPayload) {
-final encryptionIv = header.fields[HeaderFields.EncryptionIV].bytes;
+final encryptionIv = header.fields[HeaderFields.EncryptionIV]!.bytes!;
 final decryptCipher = CBCBlockCipher(AESFastEngine());
 decryptCipher.init(
@@ -876,7 +876,7 @@ class KdbxFormat {
 /// TODO combine this with [_decryptContentV4] (or [_encryptDataAes]?)
 Uint8List _encryptContentV4Aes(
 KdbxHeader header, Uint8List cipherKey, Uint8List bytes) {
-final encryptionIv = header.fields[HeaderFields.EncryptionIV].bytes;
+final encryptionIv = header.fields[HeaderFields.EncryptionIV]!.bytes!;
 final encryptCypher = CBCBlockCipher(AESFastEngine());
 encryptCypher.init(
 true, ParametersWithIV(KeyParameter(cipherKey), encryptionIv));
@@ -887,10 +887,10 @@ class KdbxFormat {
 static Future<Uint8List> _generateMasterKeyV3(
 KdbxHeader header, Credentials credentials) async {
 final rounds = header.v3KdfTransformRounds;
-final seed = header.fields[HeaderFields.TransformSeed].bytes;
-final masterSeed = header.fields[HeaderFields.MasterSeed].bytes;
+final seed = header.fields[HeaderFields.TransformSeed]!.bytes;
+final masterSeed = header.fields[HeaderFields.MasterSeed]!.bytes!;
 _logger.finer(
-'Rounds: $rounds (${ByteUtils.toHexList(header.fields[HeaderFields.TransformRounds].bytes)})');
+'Rounds: $rounds (${ByteUtils.toHexList(header.fields[HeaderFields.TransformRounds]!.bytes)})');
 final transformedKey = await KeyEncrypterKdf.encryptAesAsync(
 EncryptAesArgs(seed, credentials.getHash(), rounds));
@@ -909,9 +909,9 @@ class KdbxFormat {
 encryptCipher, AesHelper.pad(payload, encryptCipher.blockSize));
 }
-static Uint8List _gzipEncode(Uint8List bytes) {
+static Uint8List? _gzipEncode(Uint8List bytes) {
 if (dartWebWorkaround) {
-return GZipEncoder().encode(bytes) as Uint8List;
+return GZipEncoder().encode(bytes) as Uint8List?;
 }
 return GZipCodec().encode(bytes) as Uint8List;
 }

lib/src/kdbx_group.dart (14 lines changed)

@@ -14,9 +14,9 @@ final _logger = Logger('kdbx_group');
 class KdbxGroup extends KdbxObject {
 KdbxGroup.create({
-@required KdbxReadWriteContext ctx,
-@required KdbxGroup parent,
-@required String name,
+required KdbxReadWriteContext ctx,
+required KdbxGroup? parent,
+required String? name,
 }) : super.create(
 ctx,
 parent?.file,
@@ -28,7 +28,7 @@ class KdbxGroup extends KdbxObject {
 expanded.set(true);
 }
-KdbxGroup.read(KdbxReadWriteContext ctx, KdbxGroup parent, XmlElement node)
+KdbxGroup.read(KdbxReadWriteContext ctx, KdbxGroup? parent, XmlElement node)
 : super.read(ctx, parent, node) {
 node
 .findElements(KdbxXml.NODE_GROUP)
@@ -131,7 +131,7 @@ class KdbxGroup extends KdbxObject {
 void _mergeSubObjects<T extends KdbxObject>(
 MergeContext mergeContext, List<T> me, List<T> other,
-{@required T Function(T obj) importToHere}) {
+{required T Function(T obj) importToHere}) {
 // possibilities:
 // 1. Not changed at all 👍
 // 2. Deleted in other
@@ -146,7 +146,7 @@ class KdbxGroup extends KdbxObject {
 if (meObj == null) {
 // moved or deleted.
-final movedObj = mergeContext.objectIndex[otherObj.uuid];
+final movedObj = mergeContext.objectIndex![otherObj.uuid];
 if (movedObj == null) {
 // item was created in the other file. we have to import it
 final newMeObject = importToHere(otherObj);
@@ -156,7 +156,7 @@ class KdbxGroup extends KdbxObject {
 // item was moved.
 if (otherObj.wasMovedAfter(movedObj)) {
 // item was moved in the other file, so we have to move it here.
-file.move(movedObj, this);
+file!.move(movedObj, this);
 mergeContext.trackChange(movedObj, debug: 'moved to another group');
 } else {
 // item was moved in this file, so nothing to do.

lib/src/kdbx_header.dart (80 lines changed)

@@ -42,7 +42,7 @@ final _compressionIdsById =
 _compressionIds.map((key, value) => MapEntry(value, key));
 extension on Compression {
-int get id => _compressionIds[this];
+int? get id => _compressionIds[this];
 }
 /// how protected values are encrypted in the xml.
@@ -134,7 +134,7 @@ class HeaderField implements HeaderFieldBase<HeaderFields> {
 @override
 final HeaderFields field;
-final Uint8List bytes;
+final Uint8List? bytes;
 String get name => field.toString();
 }
@@ -144,19 +144,19 @@ class InnerHeaderField implements HeaderFieldBase<InnerHeaderFields> {
 @override
 final InnerHeaderFields field;
-final Uint8List bytes;
+final Uint8List? bytes;
 String get name => field.toString();
 }
 class KdbxHeader {
 KdbxHeader({
-@required this.sig1,
-@required this.sig2,
-@required KdbxVersion version,
-@required this.fields,
-@required this.endPos,
-Map<InnerHeaderFields, InnerHeaderField> innerFields,
+required this.sig1,
+required this.sig2,
+required KdbxVersion version,
+required this.fields,
+required this.endPos,
+Map<InnerHeaderFields, InnerHeaderField>? innerFields,
 }) : _version = version,
 innerHeader = InnerHeader(fields: innerFields ?? {});
@@ -322,10 +322,10 @@ class KdbxHeader {
 void _writeInnerField(WriterHelper writer, InnerHeaderField value) {
 final field = value.field;
 _logger.finer(
-'Writing header $field (${field.index}) (${value.bytes.lengthInBytes})');
+'Writing header $field (${field.index}) (${value.bytes!.lengthInBytes})');
 writer.writeUint8(field.index);
-_writeFieldSize(writer, value.bytes.lengthInBytes);
-writer.writeBytes(value.bytes);
+_writeFieldSize(writer, value.bytes!.lengthInBytes);
+writer.writeBytes(value.bytes!);
 }
 void _writeField(WriterHelper writer, HeaderFields field) {
@@ -333,10 +333,10 @@ class KdbxHeader {
 if (value == null) {
 return;
 }
-_logger.finer('Writing header $field (${value.bytes.lengthInBytes})');
+_logger.finer('Writing header $field (${value.bytes!.lengthInBytes})');
 writer.writeUint8(field.index);
-_writeFieldSize(writer, value.bytes.lengthInBytes);
-writer.writeBytes(value.bytes);
+_writeFieldSize(writer, value.bytes!.lengthInBytes);
+writer.writeBytes(value.bytes!);
 }
 void _writeFieldSize(WriterHelper writer, int size) {
@@ -348,9 +348,9 @@ class KdbxHeader {
 }
 static Map<HeaderFields, HeaderField> _defaultFieldValues() => _headerFields({
-HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes].toBytes(),
+HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes]!.toBytes(),
 HeaderFields.CompressionFlags:
-WriterHelper.singleUint32Bytes(Compression.gzip.id),
+WriterHelper.singleUint32Bytes(Compression.gzip.id!),
 HeaderFields.TransformRounds: WriterHelper.singleUint64Bytes(6000),
 HeaderFields.InnerRandomStreamID: WriterHelper.singleUint32Bytes(
 ProtectedValueEncryption.values
@@ -359,9 +359,9 @@ class KdbxHeader {
 static Map<HeaderFields, HeaderField> _defaultFieldValuesV4() =>
 _headerFields({
-HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes].toBytes(),
+HeaderFields.CipherID: CryptoConsts.CIPHER_IDS[Cipher.aes]!.toBytes(),
 HeaderFields.CompressionFlags:
-WriterHelper.singleUint32Bytes(Compression.gzip.id),
+WriterHelper.singleUint32Bytes(Compression.gzip.id!),
 HeaderFields.KdfParameters: _createKdfDefaultParameters().write(),
 // HeaderFields.InnerRandomStreamID: WriterHelper.singleUint32Bytes(
 // ProtectedValueEncryption.values
@@ -428,7 +428,7 @@ class KdbxHeader {
 ReaderHelper reader,
 KdbxVersion version,
 List<TE> fields,
-T Function(TE field, Uint8List bytes) createField) =>
+T Function(TE field, Uint8List? bytes) createField) =>
 Map<TE, T>.fromEntries(readField(reader, version, fields, createField)
 .map((field) => MapEntry(field.field, field)));
@@ -436,7 +436,7 @@ class KdbxHeader {
ReaderHelper reader, ReaderHelper reader,
KdbxVersion version, KdbxVersion version,
List<TE> fields, List<TE> fields,
T Function(TE field, Uint8List bytes) createField) sync* { T Function(TE field, Uint8List? bytes) createField) sync* {
while (true) { while (true) {
final headerId = reader.readUint8(); final headerId = reader.readUint8();
final bodySize = final bodySize =
@ -472,17 +472,17 @@ class KdbxHeader {
final InnerHeader innerHeader; final InnerHeader innerHeader;
/// end position of the header, if we have been reading from a stream. /// end position of the header, if we have been reading from a stream.
final int endPos; final int? endPos;
Cipher get cipher { Cipher? get cipher {
if (version < KdbxVersion.V4) { if (version < KdbxVersion.V4) {
assert( assert(
CryptoConsts.cipherFromBytes(fields[HeaderFields.CipherID].bytes) == CryptoConsts.cipherFromBytes(fields[HeaderFields.CipherID]!.bytes!) ==
Cipher.aes); Cipher.aes);
return Cipher.aes; return Cipher.aes;
} }
try { try {
return CryptoConsts.cipherFromBytes(fields[HeaderFields.CipherID].bytes); return CryptoConsts.cipherFromBytes(fields[HeaderFields.CipherID]!.bytes!);
} catch (e, stackTrace) { } catch (e, stackTrace) {
_logger.warning( _logger.warning(
'Unable to find cipher. ' 'Unable to find cipher. '
@ -496,39 +496,39 @@ class KdbxHeader {
} }
} }
set cipher(Cipher cipher) { set cipher(Cipher? cipher) {
checkArgument(version >= KdbxVersion.V4 || cipher == Cipher.aes, checkArgument(version >= KdbxVersion.V4 || cipher == Cipher.aes,
message: 'Kdbx 3 only supports aes, tried to set it to $cipher'); message: 'Kdbx 3 only supports aes, tried to set it to $cipher');
_setHeaderField( _setHeaderField(
HeaderFields.CipherID, HeaderFields.CipherID,
CryptoConsts.CIPHER_IDS[cipher].toBytes(), CryptoConsts.CIPHER_IDS[cipher!]!.toBytes(),
); );
} }
Compression get compression { Compression get compression {
final id = final id =
ReaderHelper.singleUint32(fields[HeaderFields.CompressionFlags].bytes); ReaderHelper.singleUint32(fields[HeaderFields.CompressionFlags]!.bytes);
return _compressionIdsById[id] ?? return _compressionIdsById[id] ??
(() => throw KdbxUnsupportedException('invalid compression $id'))(); (() => throw KdbxUnsupportedException('invalid compression $id'))()!;
} }
ProtectedValueEncryption get innerRandomStreamEncryption => ProtectedValueEncryption get innerRandomStreamEncryption =>
ProtectedValueEncryption ProtectedValueEncryption
.values[ReaderHelper.singleUint32(_innerRandomStreamEncryptionBytes)]; .values[ReaderHelper.singleUint32(_innerRandomStreamEncryptionBytes)];
Uint8List get _innerRandomStreamEncryptionBytes => version >= KdbxVersion.V4 Uint8List? get _innerRandomStreamEncryptionBytes => version >= KdbxVersion.V4
? innerHeader.fields[InnerHeaderFields.InnerRandomStreamID].bytes ? innerHeader.fields[InnerHeaderFields.InnerRandomStreamID]!.bytes
: fields[HeaderFields.InnerRandomStreamID].bytes; : fields[HeaderFields.InnerRandomStreamID]!.bytes;
Uint8List get protectedStreamKey => version >= KdbxVersion.V4 Uint8List? get protectedStreamKey => version >= KdbxVersion.V4
? innerHeader.fields[InnerHeaderFields.InnerRandomStreamKey].bytes ? innerHeader.fields[InnerHeaderFields.InnerRandomStreamKey]!.bytes
: fields[HeaderFields.ProtectedStreamKey].bytes; : fields[HeaderFields.ProtectedStreamKey]!.bytes;
VarDictionary get readKdfParameters => VarDictionary.read( VarDictionary get readKdfParameters => VarDictionary.read(
ReaderHelper(fields[HeaderFields.KdfParameters].bytes)); ReaderHelper(fields[HeaderFields.KdfParameters]!.bytes));
int get v3KdfTransformRounds => int get v3KdfTransformRounds =>
ReaderHelper.singleUint64(fields[HeaderFields.TransformRounds].bytes); ReaderHelper.singleUint64(fields[HeaderFields.TransformRounds]!.bytes);
void writeKdfParameters(VarDictionary kdfParameters) => void writeKdfParameters(VarDictionary kdfParameters) =>
_setHeaderField(HeaderFields.KdfParameters, kdfParameters.write()); _setHeaderField(HeaderFields.KdfParameters, kdfParameters.write());
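A recurring pattern in this file's migration is a nullable map lookup resolved with a null-aware default or a loud failure. A hedged, self-contained sketch of the same idea as the compression getter above, with stand-in names (CompressionMode, compressionFromId) and a plain throw expression instead of the original's closure; the id values follow the usual KeePass convention (0 = none, 1 = gzip).

// Hedged sketch of the "nullable lookup, fail loudly" pattern used by the
// header getters above; CompressionMode and FormatException are stand-ins.
enum CompressionMode { none, gzip }

const _idsToCompression = <int, CompressionMode>{
  0: CompressionMode.none,
  1: CompressionMode.gzip,
};

CompressionMode compressionFromId(int id) =>
    _idsToCompression[id] ??
    (throw FormatException('invalid compression $id'));

void main() {
  print(compressionFromId(1)); // CompressionMode.gzip
  // compressionFromId(9) would throw: FormatException: invalid compression 9
}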
@ -563,7 +563,7 @@ class KdbxInvalidKeyException implements KdbxException {}
class KdbxCorruptedFileException implements KdbxException { class KdbxCorruptedFileException implements KdbxException {
KdbxCorruptedFileException([this.message]); KdbxCorruptedFileException([this.message]);
final String message; final String? message;
@override @override
String toString() { String toString() {
@ -636,8 +636,8 @@ class HashedBlockReader {
class InnerHeader { class InnerHeader {
InnerHeader({ InnerHeader({
@required this.fields, required this.fields,
List<InnerHeaderField> binaries, List<InnerHeaderField>? binaries,
}) : binaries = binaries ?? [], }) : binaries = binaries ?? [],
assert(fields != null); assert(fields != null);

32  lib/src/kdbx_meta.dart

@ -20,9 +20,9 @@ final _logger = Logger('kdbx_meta');
class KdbxMeta extends KdbxNode implements KdbxNodeContext { class KdbxMeta extends KdbxNode implements KdbxNodeContext {
KdbxMeta.create({ KdbxMeta.create({
@required String databaseName, required String databaseName,
@required this.ctx, required this.ctx,
String generator, String? generator,
}) : customData = KdbxCustomData.create(), }) : customData = KdbxCustomData.create(),
binaries = [], binaries = [],
_customIcons = {}, _customIcons = {},
@ -41,13 +41,13 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
KdbxMeta.read(xml.XmlElement node, this.ctx) KdbxMeta.read(xml.XmlElement node, this.ctx)
: customData = node : customData = node
.singleElement('CustomData') .singleElement('CustomData')
?.let((e) => KdbxCustomData.read(e)) ?? ?.let((e) => KdbxCustomData.read(e!)) ??
KdbxCustomData.create(), KdbxCustomData.create(),
binaries = node binaries = node
.singleElement(KdbxXml.NODE_BINARIES) .singleElement(KdbxXml.NODE_BINARIES)
?.let((el) sync* { ?.let((el) sync* {
for (final binaryNode in el.findElements(KdbxXml.NODE_BINARY)) { for (final binaryNode in el!.findElements(KdbxXml.NODE_BINARY)) {
final id = int.parse(binaryNode.getAttribute(KdbxXml.ATTR_ID)); final id = int.parse(binaryNode.getAttribute(KdbxXml.ATTR_ID)!);
yield MapEntry( yield MapEntry(
id, id,
KdbxBinary.readBinaryXml(binaryNode, isInline: false), KdbxBinary.readBinaryXml(binaryNode, isInline: false),
@ -56,7 +56,7 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
}) })
?.toList() ?.toList()
?.let((binaries) { ?.let((binaries) {
binaries.sort((a, b) => a.key.compareTo(b.key)); binaries!.sort((a, b) => a.key.compareTo(b.key));
for (var i = 0; i < binaries.length; i++) { for (var i = 0; i < binaries.length; i++) {
if (i != binaries[i].key) { if (i != binaries[i].key) {
throw KdbxCorruptedFileException( throw KdbxCorruptedFileException(
@ -69,7 +69,7 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
_customIcons = node _customIcons = node
.singleElement(KdbxXml.NODE_CUSTOM_ICONS) .singleElement(KdbxXml.NODE_CUSTOM_ICONS)
?.let((el) sync* { ?.let((el) sync* {
for (final iconNode in el.findElements(KdbxXml.NODE_ICON)) { for (final iconNode in el!.findElements(KdbxXml.NODE_ICON)) {
yield KdbxCustomIcon( yield KdbxCustomIcon(
uuid: KdbxUuid( uuid: KdbxUuid(
iconNode.singleTextNode(KdbxXml.NODE_UUID)), iconNode.singleTextNode(KdbxXml.NODE_UUID)),
@ -78,7 +78,7 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
} }
}) })
?.map((e) => MapEntry(e.uuid, e)) ?.map((e) => MapEntry(e.uuid, e))
?.let((that) => Map.fromEntries(that)) ?? ?.let((that) => Map.fromEntries(that!)) ??
{}, {},
super.read(node); super.read(node);
@ -88,11 +88,11 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
final KdbxCustomData customData; final KdbxCustomData customData;
/// only used in Kdbx 3 /// only used in Kdbx 3
final List<KdbxBinary> binaries; final List<KdbxBinary>? binaries;
final Map<KdbxUuid, KdbxCustomIcon> _customIcons; final Map<KdbxUuid?, KdbxCustomIcon> _customIcons;
Map<KdbxUuid, KdbxCustomIcon> get customIcons => Map<KdbxUuid?, KdbxCustomIcon> get customIcons =>
UnmodifiableMapView(_customIcons); UnmodifiableMapView(_customIcons);
void addCustomIcon(KdbxCustomIcon customIcon) { void addCustomIcon(KdbxCustomIcon customIcon) {
@ -173,8 +173,8 @@ class KdbxMeta extends KdbxNode implements KdbxNodeContext {
XmlElement(XmlName(KdbxXml.NODE_CUSTOM_ICONS)) XmlElement(XmlName(KdbxXml.NODE_CUSTOM_ICONS))
..children.addAll(customIcons.values.map( ..children.addAll(customIcons.values.map(
(e) => XmlUtils.createNode(KdbxXml.NODE_ICON, [ (e) => XmlUtils.createNode(KdbxXml.NODE_ICON, [
XmlUtils.createTextNode(KdbxXml.NODE_UUID, e.uuid.uuid), XmlUtils.createTextNode(KdbxXml.NODE_UUID, e.uuid!.uuid),
XmlUtils.createTextNode(KdbxXml.NODE_DATA, base64.encode(e.data)) XmlUtils.createTextNode(KdbxXml.NODE_DATA, base64.encode(e.data!))
]), ]),
)), )),
); );
@ -227,8 +227,8 @@ class KdbxCustomIcon {
KdbxCustomIcon({this.uuid, this.data}); KdbxCustomIcon({this.uuid, this.data});
/// uuid of the icon, must be unique within each file. /// uuid of the icon, must be unique within each file.
final KdbxUuid uuid; final KdbxUuid? uuid;
/// Encoded png data of the image. will be base64 encoded into the kdbx file. /// Encoded png data of the image. will be base64 encoded into the kdbx file.
final Uint8List data; final Uint8List? data;
} }
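The ?.let(...) chains in KdbxMeta.read above rely on a small scope-function extension declared in lib/src/internal/extension_utils.dart (not shown in this diff). A hedged approximation of such an extension and of how the null-aware chain reads with it; the real implementation may differ in detail.

// Hedged approximation of a Kotlin-style `let` extension; the real one lives
// in lib/src/internal/extension_utils.dart and may differ.
extension LetSketch<T> on T {
  R let<R>(R Function(T it) block) => block(this);
}

void main() {
  final Map<String, String>? attributes = {'ID': '3'};
  // `?.let` only runs the block when the receiver is non-null.
  final id = attributes?.let((a) => int.parse(a['ID']!));
  print(id); // 3
}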

33  lib/src/kdbx_object.dart

@ -2,6 +2,7 @@ import 'dart:async';
import 'dart:convert'; import 'dart:convert';
import 'dart:typed_data'; import 'dart:typed_data';
import 'package:collection/collection.dart' show IterableExtension;
import 'package:kdbx/src/internal/extension_utils.dart'; import 'package:kdbx/src/internal/extension_utils.dart';
import 'package:kdbx/src/kdbx_file.dart'; import 'package:kdbx/src/kdbx_file.dart';
import 'package:kdbx/src/kdbx_format.dart'; import 'package:kdbx/src/kdbx_format.dart';
@ -20,7 +21,7 @@ import 'package:xml/xml.dart';
final _logger = Logger('kdbx.kdbx_object'); final _logger = Logger('kdbx.kdbx_object');
class ChangeEvent<T> { class ChangeEvent<T> {
ChangeEvent({this.object, this.isDirty}); ChangeEvent({required this.object, required this.isDirty});
final T object; final T object;
final bool isDirty; final bool isDirty;
@ -109,8 +110,8 @@ abstract class KdbxNode with Changeable<KdbxNode> {
} }
extension IterableKdbxObject<T extends KdbxObject> on Iterable<T> { extension IterableKdbxObject<T extends KdbxObject> on Iterable<T> {
T findByUuid(KdbxUuid uuid) => T? findByUuid(KdbxUuid uuid) =>
firstWhere((element) => element.uuid == uuid, orElse: () => null); firstWhereOrNull((element) => element.uuid == uuid);
} }
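Since orElse: () => null is no longer allowed for a non-nullable T, findByUuid now uses firstWhereOrNull from package:collection (imported at the top of this file). A self-contained illustration of the same lookup with a hypothetical Item type:

import 'package:collection/collection.dart' show IterableExtension;

// Hypothetical stand-in for a KdbxObject carrying a uuid.
class Item {
  Item(this.uuid);
  final String uuid;
}

Item? findByUuid(Iterable<Item> items, String uuid) =>
    items.firstWhereOrNull((element) => element.uuid == uuid);

void main() {
  final items = [Item('a'), Item('b')];
  print(findByUuid(items, 'b')?.uuid); // b
  print(findByUuid(items, 'missing')); // null
}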
extension KdbxObjectInternal on KdbxObject { extension KdbxObjectInternal on KdbxObject {
@ -149,7 +150,7 @@ abstract class KdbxObject extends KdbxNode {
this.ctx, this.ctx,
this.file, this.file,
String nodeName, String nodeName,
KdbxGroup parent, KdbxGroup? parent,
) : assert(ctx != null), ) : assert(ctx != null),
times = KdbxTimes.create(ctx), times = KdbxTimes.create(ctx),
_parent = parent, _parent = parent,
@ -157,7 +158,7 @@ abstract class KdbxObject extends KdbxNode {
_uuid.set(KdbxUuid.random()); _uuid.set(KdbxUuid.random());
} }
KdbxObject.read(this.ctx, KdbxGroup parent, XmlElement node) KdbxObject.read(this.ctx, KdbxGroup? parent, XmlElement node)
: assert(ctx != null), : assert(ctx != null),
times = KdbxTimes.read(node.findElements('Times').single, ctx), times = KdbxTimes.read(node.findElements('Times').single, ctx),
_parent = parent, _parent = parent,
@ -165,13 +166,13 @@ abstract class KdbxObject extends KdbxNode {
/// the file this object is part of. will be set AFTER loading, etc. /// the file this object is part of. will be set AFTER loading, etc.
/// TODO: We should probably get rid of this `file` reference. /// TODO: We should probably get rid of this `file` reference.
KdbxFile file; KdbxFile? file;
final KdbxReadWriteContext ctx; final KdbxReadWriteContext ctx;
final KdbxTimes times; final KdbxTimes times;
KdbxUuid get uuid => _uuid.get(); KdbxUuid get uuid => _uuid.get()!;
UuidNode get _uuid => UuidNode(this, KdbxXml.NODE_UUID); UuidNode get _uuid => UuidNode(this, KdbxXml.NODE_UUID);
@ -179,16 +180,16 @@ abstract class KdbxObject extends KdbxNode {
UuidNode get customIconUuid => UuidNode(this, 'CustomIconUUID'); UuidNode get customIconUuid => UuidNode(this, 'CustomIconUUID');
KdbxGroup get parent => _parent; KdbxGroup? get parent => _parent;
KdbxGroup _parent; KdbxGroup? _parent;
KdbxCustomIcon get customIcon => KdbxCustomIcon? get customIcon =>
customIconUuid.get()?.let((uuid) => file.body.meta.customIcons[uuid]); customIconUuid.get()?.let((uuid) => file!.body.meta.customIcons[uuid]);
set customIcon(KdbxCustomIcon icon) { set customIcon(KdbxCustomIcon? icon) {
if (icon != null) { if (icon != null) {
file.body.meta.addCustomIcon(icon); file!.body.meta.addCustomIcon(icon);
customIconUuid.set(icon.uuid); customIconUuid.set(icon.uuid);
} else { } else {
customIconUuid.set(null); customIconUuid.set(null);
@ -204,11 +205,11 @@ abstract class KdbxObject extends KdbxNode {
} }
bool wasModifiedAfter(KdbxObject other) => times.lastModificationTime bool wasModifiedAfter(KdbxObject other) => times.lastModificationTime
.get() .get()!
.isAfter(other.times.lastModificationTime.get()); .isAfter(other.times.lastModificationTime.get()!);
bool wasMovedAfter(KdbxObject other) => bool wasMovedAfter(KdbxObject other) =>
times.locationChanged.get().isAfter(other.times.locationChanged.get()); times.locationChanged.get()!.isAfter(other.times.locationChanged.get()!);
@override @override
XmlElement toXml() { XmlElement toXml() {

8  lib/src/kdbx_var_dictionary.dart

@ -18,7 +18,7 @@ class ValueType<T> {
const ValueType(this.code, this.decoder, [this.encoder]); const ValueType(this.code, this.decoder, [this.encoder]);
final int code; final int code;
final Decoder<T> decoder; final Decoder<T> decoder;
final Encoder<T> encoder; final Encoder<T>? encoder;
static final typeUInt32 = ValueType( static final typeUInt32 = ValueType(
0x04, 0x04,
@ -70,7 +70,7 @@ class ValueType<T> {
]; ];
void encode(WriterHelper writer, T value) { void encode(WriterHelper writer, T value) {
encoder(writer, value); encoder!(writer, value);
} }
} }
@ -125,11 +125,11 @@ class VarDictionary {
return writer.output.toBytes(); return writer.output.toBytes();
} }
T get<T>(ValueType<T> type, String key) => _dict[key]?._value as T; T? get<T>(ValueType<T> type, String key) => _dict[key]?._value as T?;
void set<T>(ValueType<T> type, String key, T value) => void set<T>(ValueType<T> type, String key, T value) =>
_dict[key] = VarDictionaryItem<T>(key, type, value); _dict[key] = VarDictionaryItem<T>(key, type, value);
static VarDictionaryItem<dynamic> _readItem(ReaderHelper reader) { static VarDictionaryItem<dynamic>? _readItem(ReaderHelper reader) {
final type = reader.readUint8(); final type = reader.readUint8();
if (type == 0) { if (type == 0) {
return null; return null;
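With get<T> now returning T?, every call site has to choose between a default and a hard failure. A hedged illustration of both options against a plain map standing in for VarDictionary; the key name 'R' is only an example.

// Hedged illustration of consuming a now-nullable lookup; the map stands in
// for VarDictionary, and the key name 'R' is purely illustrative.
void main() {
  final dict = <String, Object?>{'R': 6000};

  // Option 1: fall back to a default when the key is absent.
  final rounds = (dict['R'] as int?) ?? 6000;

  // Option 2: fail loudly when the key is required.
  final requiredRounds =
      dict['R'] as int? ?? (throw StateError('missing KDF parameter R'));

  print('$rounds $requiredRounds'); // 6000 6000
}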

49  lib/src/kdbx_xml.dart

@ -2,6 +2,7 @@ import 'dart:convert';
import 'dart:typed_data'; import 'dart:typed_data';
import 'package:clock/clock.dart'; import 'package:clock/clock.dart';
import 'package:collection/collection.dart' show IterableExtension;
import 'package:kdbx/src/kdbx_format.dart'; import 'package:kdbx/src/kdbx_format.dart';
import 'package:kdbx/src/kdbx_header.dart'; import 'package:kdbx/src/kdbx_header.dart';
import 'package:kdbx/src/kdbx_object.dart'; import 'package:kdbx/src/kdbx_object.dart';
@ -80,27 +81,27 @@ extension on List<XmlNode> {
} }
} }
abstract class KdbxSubTextNode<T> extends KdbxSubNode<T> { abstract class KdbxSubTextNode<T> extends KdbxSubNode<T?> {
KdbxSubTextNode(KdbxNode node, String name) : super(node, name); KdbxSubTextNode(KdbxNode node, String name) : super(node, name);
void Function() _onModify; void Function()? _onModify;
@protected @protected
String encode(T value); String? encode(T value);
@protected @protected
T decode(String value); T decode(String value);
XmlElement _opt(String nodeName) => node.node XmlElement? _opt(String nodeName) => node.node
.findElements(nodeName) .findElements(nodeName)
.singleWhere((x) => true, orElse: () => null); .singleWhereOrNull((x) => true);
void setOnModifyListener(void Function() onModify) { void setOnModifyListener(void Function() onModify) {
_onModify = onModify; _onModify = onModify;
} }
@override @override
T get() { T? get() {
final textValue = _opt(name)?.text; final textValue = _opt(name)?.text;
if (textValue == null) { if (textValue == null) {
return null; return null;
@ -109,7 +110,7 @@ abstract class KdbxSubTextNode<T> extends KdbxSubNode<T> {
} }
@override @override
bool set(T value, {bool force = false}) { bool set(T? value, {bool force = false}) {
if (get() == value && force != true) { if (get() == value && force != true) {
return false; return false;
} }
@ -140,24 +141,24 @@ abstract class KdbxSubTextNode<T> extends KdbxSubNode<T> {
} }
} }
class IntNode extends KdbxSubTextNode<int> { class IntNode extends KdbxSubTextNode<int?> {
IntNode(KdbxNode node, String name) : super(node, name); IntNode(KdbxNode node, String name) : super(node, name);
@override @override
int decode(String value) => int.tryParse(value); int? decode(String value) => int.tryParse(value);
@override @override
String encode(int value) => value.toString(); String encode(int? value) => value.toString();
} }
class StringNode extends KdbxSubTextNode<String> { class StringNode extends KdbxSubTextNode<String?> {
StringNode(KdbxNode node, String name) : super(node, name); StringNode(KdbxNode node, String name) : super(node, name);
@override @override
String decode(String value) => value; String decode(String value) => value;
@override @override
String encode(String value) => value; String? encode(String? value) => value;
} }
class Base64Node extends KdbxSubTextNode<ByteBuffer> { class Base64Node extends KdbxSubTextNode<ByteBuffer> {
@ -170,21 +171,21 @@ class Base64Node extends KdbxSubTextNode<ByteBuffer> {
String encode(ByteBuffer value) => base64.encode(value.asUint8List()); String encode(ByteBuffer value) => base64.encode(value.asUint8List());
} }
class UuidNode extends KdbxSubTextNode<KdbxUuid> { class UuidNode extends KdbxSubTextNode<KdbxUuid?> {
UuidNode(KdbxNode node, String name) : super(node, name); UuidNode(KdbxNode node, String name) : super(node, name);
@override @override
KdbxUuid decode(String value) => KdbxUuid(value); KdbxUuid decode(String value) => KdbxUuid(value);
@override @override
String encode(KdbxUuid value) => value.uuid; String encode(KdbxUuid? value) => value!.uuid;
} }
class IconNode extends KdbxSubTextNode<KdbxIcon> { class IconNode extends KdbxSubTextNode<KdbxIcon> {
IconNode(KdbxNode node, String name) : super(node, name); IconNode(KdbxNode node, String name) : super(node, name);
@override @override
KdbxIcon decode(String value) => KdbxIcon.values[int.tryParse(value)]; KdbxIcon decode(String value) => KdbxIcon.values[int.tryParse(value)!];
@override @override
String encode(KdbxIcon value) => value.index.toString(); String encode(KdbxIcon value) => value.index.toString();
@ -214,11 +215,11 @@ class ColorNode extends KdbxSubTextNode<KdbxColor> {
String encode(KdbxColor value) => value.isNull ? '' : value._rgb; String encode(KdbxColor value) => value.isNull ? '' : value._rgb;
} }
class BooleanNode extends KdbxSubTextNode<bool> { class BooleanNode extends KdbxSubTextNode<bool?> {
BooleanNode(KdbxNode node, String name) : super(node, name); BooleanNode(KdbxNode node, String name) : super(node, name);
@override @override
bool decode(String value) { bool? decode(String value) {
switch (value?.toLowerCase()) { switch (value?.toLowerCase()) {
case 'null': case 'null':
return null; return null;
@ -231,10 +232,10 @@ class BooleanNode extends KdbxSubTextNode<bool> {
} }
@override @override
String encode(bool value) => value ? 'true' : 'false'; String encode(bool? value) => value! ? 'true' : 'false';
} }
class DateTimeUtcNode extends KdbxSubTextNode<DateTime> { class DateTimeUtcNode extends KdbxSubTextNode<DateTime?> {
DateTimeUtcNode(KdbxNodeContext node, String name) : super(node, name); DateTimeUtcNode(KdbxNodeContext node, String name) : super(node, name);
static const EpochSeconds = 62135596800; static const EpochSeconds = 62135596800;
@ -250,7 +251,7 @@ class DateTimeUtcNode extends KdbxSubTextNode<DateTime> {
} }
@override @override
DateTime decode(String value) { DateTime? decode(String value) {
if (value == null) { if (value == null) {
return null; return null;
} }
@ -278,17 +279,17 @@ class DateTimeUtcNode extends KdbxSubTextNode<DateTime> {
} }
@override @override
String encode(DateTime value) { String encode(DateTime? value) {
assert(value.isUtc); assert(value!.isUtc);
if (_ctx.versionMajor >= 4) { if (_ctx.versionMajor >= 4) {
// for kdbx v4 we need to support binary/base64 // for kdbx v4 we need to support binary/base64
final secondsFrom00 = final secondsFrom00 =
(value.millisecondsSinceEpoch ~/ 1000) + EpochSeconds; (value!.millisecondsSinceEpoch ~/ 1000) + EpochSeconds;
final encoded = base64.encode( final encoded = base64.encode(
(WriterHelper()..writeUint64(secondsFrom00)).output.toBytes()); (WriterHelper()..writeUint64(secondsFrom00)).output.toBytes());
return encoded; return encoded;
} }
return DateTimeUtils.toIso8601StringSeconds(value); return DateTimeUtils.toIso8601StringSeconds(value!);
} }
} }
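The encode above stores KDBX 4 timestamps as base64 of a little-endian uint64 counting seconds since 0001-01-01 UTC, hence the 62135596800 offset from the Unix epoch. A self-contained sketch of the same arithmetic, using dart:typed_data directly instead of the package's WriterHelper:

import 'dart:convert';
import 'dart:typed_data';

// Hedged sketch of the KDBX 4 timestamp encoding shown above: seconds since
// 0001-01-01T00:00:00Z, written as a little-endian uint64, base64-encoded.
const epochSeconds = 62135596800; // seconds between year 0001 and 1970

String encodeKdbx4Time(DateTime value) {
  assert(value.isUtc);
  final secondsFrom0001 = value.millisecondsSinceEpoch ~/ 1000 + epochSeconds;
  final bytes = ByteData(8)..setUint64(0, secondsFrom0001, Endian.little);
  return base64.encode(bytes.buffer.asUint8List());
}

void main() {
  print(encodeKdbx4Time(DateTime.utc(2020, 8, 30)));
}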

38  lib/src/utils/byte_utils.dart

@ -31,7 +31,7 @@ class ByteUtils {
static String toHex(int val) => '0x${val.toRadixString(16).padLeft(2, '0')}'; static String toHex(int val) => '0x${val.toRadixString(16).padLeft(2, '0')}';
static String toHexList(List<int> list) => static String toHexList(List<int>? list) =>
list?.map((val) => toHex(val))?.join(' ') ?? '(null)'; list?.map((val) => toHex(val))?.join(' ') ?? '(null)';
} }
@ -40,9 +40,9 @@ extension Uint8ListExt on Uint8List {
} }
class ReaderHelper { class ReaderHelper {
factory ReaderHelper(Uint8List byteData) => KdbxFormat.dartWebWorkaround factory ReaderHelper(Uint8List? byteData) => KdbxFormat.dartWebWorkaround
? ReaderHelperDartWeb(byteData) ? ReaderHelperDartWeb(byteData!)
: ReaderHelper._(byteData); : ReaderHelper._(byteData!);
ReaderHelper._(this.byteData) : lengthInBytes = byteData.lengthInBytes; ReaderHelper._(this.byteData) : lengthInBytes = byteData.lengthInBytes;
final Uint8List byteData; final Uint8List byteData;
@ -101,8 +101,8 @@ class ReaderHelper {
Uint8List readRemaining() => _nextBytes(lengthInBytes - pos); Uint8List readRemaining() => _nextBytes(lengthInBytes - pos);
static int singleUint32(Uint8List bytes) => ReaderHelper(bytes).readUint32(); static int singleUint32(Uint8List? bytes) => ReaderHelper(bytes).readUint32();
static int singleUint64(Uint8List bytes) => ReaderHelper(bytes).readUint64(); static int singleUint64(Uint8List? bytes) => ReaderHelper(bytes).readUint64();
} }
class ReaderHelperDartWeb extends ReaderHelper { class ReaderHelperDartWeb extends ReaderHelper {
@ -125,48 +125,48 @@ class ReaderHelperDartWeb extends ReaderHelper {
typedef LengthWriter = void Function(int length); typedef LengthWriter = void Function(int length);
class WriterHelper { class WriterHelper {
factory WriterHelper([BytesBuilder output]) => KdbxFormat.dartWebWorkaround factory WriterHelper([BytesBuilder? output]) => KdbxFormat.dartWebWorkaround
? WriterHelperDartWeb(output) ? WriterHelperDartWeb(output)
: WriterHelper._(output); : WriterHelper._(output);
WriterHelper._([BytesBuilder output]) : output = output ?? BytesBuilder(); WriterHelper._([BytesBuilder? output]) : output = output ?? BytesBuilder();
final BytesBuilder output; final BytesBuilder output;
void _write(ByteData byteData) => output.add(byteData.buffer.asUint8List()); void _write(ByteData byteData) => output.add(byteData.buffer.asUint8List());
void writeBytes(Uint8List bytes, [LengthWriter lengthWriter]) { void writeBytes(Uint8List bytes, [LengthWriter? lengthWriter]) {
lengthWriter?.call(bytes.length); lengthWriter?.call(bytes.length);
output.add(bytes); output.add(bytes);
// output.asUint8List().addAll(bytes); // output.asUint8List().addAll(bytes);
} }
void writeUint32(int value, [LengthWriter lengthWriter]) { void writeUint32(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(4); lengthWriter?.call(4);
_write(ByteData(4)..setUint32(0, value, Endian.little)); _write(ByteData(4)..setUint32(0, value, Endian.little));
// output.asUint32List().add(value); // output.asUint32List().add(value);
} }
void writeUint64(int value, [LengthWriter lengthWriter]) { void writeUint64(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(8); lengthWriter?.call(8);
_write(ByteData(8)..setUint64(0, value, Endian.little)); _write(ByteData(8)..setUint64(0, value, Endian.little));
} }
void writeUint16(int value, [LengthWriter lengthWriter]) { void writeUint16(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(2); lengthWriter?.call(2);
_write(ByteData(2)..setUint16(0, value, Endian.little)); _write(ByteData(2)..setUint16(0, value, Endian.little));
} }
void writeInt32(int value, [LengthWriter lengthWriter]) { void writeInt32(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(4); lengthWriter?.call(4);
_write(ByteData(4)..setInt32(0, value, Endian.little)); _write(ByteData(4)..setInt32(0, value, Endian.little));
} }
void writeInt64(int value, [LengthWriter lengthWriter]) { void writeInt64(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(8); lengthWriter?.call(8);
_write(ByteData(8)..setInt64(0, value, Endian.little)); _write(ByteData(8)..setInt64(0, value, Endian.little));
} }
void writeUint8(int value, [LengthWriter lengthWriter]) { void writeUint8(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(1); lengthWriter?.call(1);
output.addByte(value); output.addByte(value);
} }
@ -176,7 +176,7 @@ class WriterHelper {
static Uint8List singleUint64Bytes(int val) => static Uint8List singleUint64Bytes(int val) =>
(WriterHelper()..writeUint64(val)).output.toBytes(); (WriterHelper()..writeUint64(val)).output.toBytes();
int writeString(String value, [LengthWriter lengthWriter]) { int writeString(String value, [LengthWriter? lengthWriter]) {
final bytes = const Utf8Encoder().convert(value); final bytes = const Utf8Encoder().convert(value);
lengthWriter?.call(bytes.length); lengthWriter?.call(bytes.length);
writeBytes(bytes); writeBytes(bytes);
@ -185,10 +185,10 @@ class WriterHelper {
} }
class WriterHelperDartWeb extends WriterHelper { class WriterHelperDartWeb extends WriterHelper {
WriterHelperDartWeb([BytesBuilder output]) : super._(output); WriterHelperDartWeb([BytesBuilder? output]) : super._(output);
@override @override
void writeUint64(int value, [LengthWriter lengthWriter]) { void writeUint64(int value, [LengthWriter? lengthWriter]) {
lengthWriter?.call(8); lengthWriter?.call(8);
final _endian = Endian.little; final _endian = Endian.little;
@ -206,7 +206,7 @@ class WriterHelperDartWeb extends WriterHelper {
} }
@override @override
void writeInt64(int value, [LengthWriter lengthWriter]) { void writeInt64(int value, [LengthWriter? lengthWriter]) {
writeUint64(value, lengthWriter); writeUint64(value, lengthWriter);
} }
} }
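ByteData.setUint64 is not available under dart2js, which is what this web-specific writer works around (its body is truncated in the hunk above). A hedged sketch of the usual split into two little-endian 32-bit words; not necessarily the package's exact implementation.

import 'dart:typed_data';

// Hedged sketch: write a 64-bit value as two little-endian 32-bit words,
// the common workaround where ByteData.setUint64 is unavailable (dart2js).
// Assumes a non-negative value below 2^53, the safe integer range on the web.
Uint8List uint64LittleEndian(int value) {
  final low = value % 0x100000000; // lower 32 bits
  final high = value ~/ 0x100000000; // upper 32 bits
  final data = ByteData(8)
    ..setUint32(0, low, Endian.little)
    ..setUint32(4, high, Endian.little);
  return data.buffer.asUint8List();
}

void main() {
  print(uint64LittleEndian(62135596800)); // [0, 247, 145, 119, 14, 0, 0, 0]
}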

12  lib/src/utils/print_utils.dart

@ -8,8 +8,8 @@ class KdbxPrintUtils {
this.forceDecrypt = false, this.forceDecrypt = false,
this.allFields = false, this.allFields = false,
}); });
final bool forceDecrypt; final bool? forceDecrypt;
final bool allFields; final bool? allFields;
String catGroupToString(KdbxGroup group) => String catGroupToString(KdbxGroup group) =>
(StringBuffer()..let((that) => catGroup(that, group))).toString(); (StringBuffer()..let((that) => catGroup(that, group))).toString();
@ -20,21 +20,21 @@ class KdbxPrintUtils {
for (final group in group.groups) { for (final group in group.groups) {
catGroup(buf, group, depth: depth + 1); catGroup(buf, group, depth: depth + 1);
} }
final valueToSting = (StringValue value) => final valueToSting = (StringValue? value) =>
forceDecrypt ? value?.getText() : value?.toString(); forceDecrypt! ? value?.getText() : value?.toString();
for (final entry in group.entries) { for (final entry in group.entries) {
final value = entry.getString(KdbxKeyCommon.PASSWORD); final value = entry.getString(KdbxKeyCommon.PASSWORD);
buf.writeln('$indent `- ${entry.debugLabel()}: ' buf.writeln('$indent `- ${entry.debugLabel()}: '
'${valueToSting(value)}'); '${valueToSting(value)}');
if (allFields) { if (allFields!) {
buf.writeln(entry.stringEntries buf.writeln(entry.stringEntries
.map((field) => .map((field) =>
'$indent ${field.key} = ${valueToSting(field.value)}') '$indent ${field.key} = ${valueToSting(field.value)}')
.join('\n')); .join('\n'));
} }
buf.writeln(entry.binaryEntries buf.writeln(entry.binaryEntries
.map((b) => '$indent `- file: ${b.key} - ${b.value.value.length}') .map((b) => '$indent `- file: ${b.key} - ${b.value.value!.length}')
.join('\n')); .join('\n'));
} }
} }

4  pubspec.yaml

@ -4,7 +4,7 @@ version: 1.0.0
homepage: https://github.com/authpass/kdbx.dart homepage: https://github.com/authpass/kdbx.dart
environment: environment:
sdk: '>=2.8.0 <3.0.0' sdk: '>=2.12.0 <3.0.0'
dependencies: dependencies:
# flutter: # flutter:
@ -27,7 +27,7 @@ dependencies:
supercharged_dart: '>=1.2.0 <4.0.0' supercharged_dart: '>=1.2.0 <4.0.0'
synchronized: '>=2.2.0 <4.0.0' synchronized: '>=2.2.0 <4.0.0'
collection: '>=1.14.0 <2.0.0' collection: ^1.15.0-nullsafety.4
# required for bin/ # required for bin/
args: '>1.5.0 <3.0.0' args: '>1.5.0 <3.0.0'

2  test/deleted_objects_test.dart

@ -15,7 +15,7 @@ void main() {
final orig = final orig =
await TestUtil.readKdbxFile('test/test_files/tombstonetest.kdbx'); await TestUtil.readKdbxFile('test/test_files/tombstonetest.kdbx');
expect(orig.body.deletedObjects, hasLength(1)); expect(orig.body.deletedObjects, hasLength(1));
final dt = orig.body.deletedObjects.first.deletionTime.get(); final dt = orig.body.deletedObjects.first.deletionTime.get()!;
expect([dt.year, dt.month, dt.day], [2020, 8, 30]); expect([dt.year, dt.month, dt.day], [2020, 8, 30]);
final reload = await TestUtil.saveAndRead(orig); final reload = await TestUtil.saveAndRead(orig);
expect(reload.body.deletedObjects, hasLength(1)); expect(reload.body.deletedObjects, hasLength(1));

2  test/icon/kdbx_customicon_test.dart

@ -7,6 +7,6 @@ void main() {
test('load custom icons from file', () async { test('load custom icons from file', () async {
final file = await TestUtil.readKdbxFile('test/icon/icontest.kdbx'); final file = await TestUtil.readKdbxFile('test/icon/icontest.kdbx');
final entry = file.body.rootGroup.entries.first; final entry = file.body.rootGroup.entries.first;
expect(entry.customIcon.data, isNotNull); expect(entry.customIcon!.data, isNotNull);
}); });
} }

2  test/internal/test_utils.dart

@ -38,7 +38,7 @@ class TestUtil {
} }
static Future<KdbxFile> readKdbxFileBytes(Uint8List data, static Future<KdbxFile> readKdbxFileBytes(Uint8List data,
{String password = 'asdf', Credentials credentials}) async { {String password = 'asdf', Credentials? credentials}) async {
final kdbxFormat = TestUtil.kdbxFormat(); final kdbxFormat = TestUtil.kdbxFormat();
final file = await kdbxFormat.read( final file = await kdbxFormat.read(
data, credentials ?? Credentials(ProtectedValue.fromString(password))); data, credentials ?? Credentials(ProtectedValue.fromString(password)));

4  test/kdbx4_test.dart

@ -24,7 +24,7 @@ void main() {
final file = await kdbxFormat.read( final file = await kdbxFormat.read(
data, Credentials(ProtectedValue.fromString('asdf'))); data, Credentials(ProtectedValue.fromString('asdf')));
final firstEntry = file.body.rootGroup.entries.first; final firstEntry = file.body.rootGroup.entries.first;
final pwd = firstEntry.getString(KdbxKeyCommon.PASSWORD).getText(); final pwd = firstEntry.getString(KdbxKeyCommon.PASSWORD)!.getText();
expect(pwd, 'MyPassword'); expect(pwd, 'MyPassword');
}); });
test('Reading kdbx4_keeweb', () async { test('Reading kdbx4_keeweb', () async {
@ -32,7 +32,7 @@ void main() {
final file = await kdbxFormat.read( final file = await kdbxFormat.read(
data, Credentials(ProtectedValue.fromString('asdf'))); data, Credentials(ProtectedValue.fromString('asdf')));
final firstEntry = file.body.rootGroup.entries.first; final firstEntry = file.body.rootGroup.entries.first;
final pwd = firstEntry.getString(KdbxKeyCommon.PASSWORD).getText(); final pwd = firstEntry.getString(KdbxKeyCommon.PASSWORD)!.getText();
expect(pwd, 'def'); expect(pwd, 'def');
}); });
test('Reading kdbx4_keeweb modification time', () async { test('Reading kdbx4_keeweb modification time', () async {

8  test/kdbx_binaries_test.dart

@ -58,10 +58,10 @@ void main() {
for (final binary in binaries) { for (final binary in binaries) {
switch (binary.key.key) { switch (binary.key.key) {
case 'example1.txt': case 'example1.txt':
expect(utf8.decode(binary.value.value), 'content1 example\n\n'); expect(utf8.decode(binary.value.value!), 'content1 example\n\n');
break; break;
case 'example2.txt': case 'example2.txt':
expect(utf8.decode(binary.value.value), 'content2 example\n\n'); expect(utf8.decode(binary.value.value!), 'content2 example\n\n');
break; break;
case 'keepasslogo.jpeg': case 'keepasslogo.jpeg':
expect(binary.value.value, hasLength(7092)); expect(binary.value.value, hasLength(7092));
@ -129,7 +129,7 @@ void main() {
final entry = file.body.rootGroup.entries.first; final entry = file.body.rootGroup.entries.first;
for (final name in ['a', 'b', 'c', 'd', 'e']) { for (final name in ['a', 'b', 'c', 'd', 'e']) {
expect( expect(
utf8.decode(entry.getBinary(KdbxKey('$name.txt')).value).trim(), utf8.decode(entry.getBinary(KdbxKey('$name.txt'))!.value!).trim(),
name, name,
); );
} }
@ -190,7 +190,7 @@ class IsUtf8String extends CustomMatcher {
IsUtf8String(dynamic matcher) : super('is utf8 string', 'utf8', matcher); IsUtf8String(dynamic matcher) : super('is utf8 string', 'utf8', matcher);
@override @override
Object featureValueOf(dynamic actual) { Object? featureValueOf(dynamic actual) {
if (actual is Uint8List) { if (actual is Uint8List) {
return utf8.decode(actual); return utf8.decode(actual);
} }

20  test/kdbx_history_test.dart

@ -12,7 +12,7 @@ class StreamExpect<T> {
if (_expectNext == null) { if (_expectNext == null) {
fail('Got event, but none was expected. $event'); fail('Got event, but none was expected. $event');
} }
expect(event, _expectNext.orNull); expect(event, _expectNext!.orNull);
_expectNext = null; _expectNext = null;
}, onDone: () { }, onDone: () {
expect(_expectNext, isNull); expect(_expectNext, isNull);
@ -23,13 +23,13 @@ class StreamExpect<T> {
}); });
} }
Future<RET> expectNext<RET>(T value, FutureOr<RET> Function() cb) async { Future<RET> expectNext<RET>(T value, FutureOr<RET>? Function() cb) async {
if (_expectNext != null) { if (_expectNext != null) {
fail('The last event was never received. last: $_expectNext'); fail('The last event was never received. last: $_expectNext');
} }
_expectNext = Optional.fromNullable(value); _expectNext = Optional.fromNullable(value);
try { try {
return await cb(); return await cb()!;
} finally { } finally {
await pumpEventQueue(); await pumpEventQueue();
} }
@ -42,7 +42,7 @@ class StreamExpect<T> {
final Stream<T> stream; final Stream<T> stream;
bool isDone = false; bool isDone = false;
dynamic error; dynamic error;
Optional<T> _expectNext; Optional<T>? _expectNext;
} }
void main() { void main() {
@ -57,7 +57,7 @@ void main() {
{ {
final first = file.body.rootGroup.entries.first; final first = file.body.rootGroup.entries.first;
expect(file.header.version.major, 3); expect(file.header.version.major, 3);
expect(first.getString(TestUtil.keyTitle).getText(), valueOrig); expect(first.getString(TestUtil.keyTitle)!.getText(), valueOrig);
await dirtyExpect.expectNext({first}, () { await dirtyExpect.expectNext({first}, () {
first.setString(TestUtil.keyTitle, PlainValue(value1)); first.setString(TestUtil.keyTitle, PlainValue(value1));
}); });
@ -68,8 +68,8 @@ void main() {
expect(file.dirtyObjects, isEmpty); expect(file.dirtyObjects, isEmpty);
{ {
final first = f2.body.rootGroup.entries.first; final first = f2.body.rootGroup.entries.first;
expect(first.getString(TestUtil.keyTitle).getText(), value1); expect(first.getString(TestUtil.keyTitle)!.getText(), value1);
expect(first.history.last.getString(TestUtil.keyTitle).getText(), expect(first.history.last.getString(TestUtil.keyTitle)!.getText(),
valueOrig); valueOrig);
await dirtyExpect.expectNext({}, () => file.save()); await dirtyExpect.expectNext({}, () => file.save());
} }
@ -85,11 +85,11 @@ void main() {
expect(file.dirtyObjects, isEmpty); expect(file.dirtyObjects, isEmpty);
{ {
final first = f3.body.rootGroup.entries.first; final first = f3.body.rootGroup.entries.first;
expect(first.getString(TestUtil.keyTitle).getText(), value2); expect(first.getString(TestUtil.keyTitle)!.getText(), value2);
expect(first.history, hasLength(2)); expect(first.history, hasLength(2));
expect( expect(
first.history.last.getString(TestUtil.keyTitle).getText(), value1); first.history.last.getString(TestUtil.keyTitle)!.getText(), value1);
expect(first.history.first.getString(TestUtil.keyTitle).getText(), expect(first.history.first.getString(TestUtil.keyTitle)!.getText(),
valueOrig); valueOrig);
await dirtyExpect.expectNext({}, () => file.save()); await dirtyExpect.expectNext({}, () => file.save());
} }

6  test/kdbx_test.dart

@ -100,7 +100,7 @@ void main() {
final file = await TestUtil.readKdbxFile('test/keepass2test.kdbx'); final file = await TestUtil.readKdbxFile('test/keepass2test.kdbx');
final first = file.body.rootGroup.entries.first; final first = file.body.rootGroup.entries.first;
expect(file.header.version.major, 3); expect(file.header.version.major, 3);
expect(first.getString(KdbxKeyCommon.TITLE).getText(), 'Sample Entry'); expect(first.getString(KdbxKeyCommon.TITLE)!.getText(), 'Sample Entry');
final modTime = first.times.lastModificationTime.get(); final modTime = first.times.lastModificationTime.get();
expect(modTime, DateTime.utc(2020, 5, 6, 7, 31, 48)); expect(modTime, DateTime.utc(2020, 5, 6, 7, 31, 48));
}); });
@ -110,7 +110,7 @@ void main() {
{ {
final first = file.body.rootGroup.entries.first; final first = file.body.rootGroup.entries.first;
expect(file.header.version.major, 3); expect(file.header.version.major, 3);
expect(first.getString(KdbxKeyCommon.TITLE).getText(), 'Sample Entry'); expect(first.getString(KdbxKeyCommon.TITLE)!.getText(), 'Sample Entry');
first.times.lastModificationTime.set(newModDate); first.times.lastModificationTime.set(newModDate);
} }
final saved = await file.save(); final saved = await file.save();
@ -141,7 +141,7 @@ void main() {
final kdbx = await kdbxFormat.read(saved, credentials); final kdbx = await kdbxFormat.read(saved, credentials);
expect( expect(
kdbx.body.rootGroup.entries.first kdbx.body.rootGroup.entries.first
.getString(KdbxKeyCommon.PASSWORD) .getString(KdbxKeyCommon.PASSWORD)!
.getText(), .getText(),
'LoremIpsum'); 'LoremIpsum');
File('test.kdbx').writeAsBytesSync(saved); File('test.kdbx').writeAsBytesSync(saved);

2  test/merge/kdbx_merge_test.dart

@ -92,7 +92,7 @@ void main() {
'${KdbxPrintUtils().catGroupToString(file.body.rootGroup)}'); '${KdbxPrintUtils().catGroupToString(file.body.rootGroup)}');
final set = Set<KdbxUuid>.from(merge.merged.keys); final set = Set<KdbxUuid>.from(merge.merged.keys);
expect(set, hasLength(5)); expect(set, hasLength(5));
expect(Set<KdbxNode>.from(merge.changes.map<KdbxNode>((e) => e.object)), expect(Set<KdbxNode>.from(merge.changes.map<KdbxNode?>((e) => e.object)),
hasLength(2)); hasLength(2));
}), }),
); );
