Client already closed exception on hot restart.
The first time the project runs, everything works fine.
Whenever I issue a hot restart in the IDE, a "client already closed" exception is raised:
Looking for _client.close() in this class, I found two calls in the abort() method:
Whenever I put a breakpoint on either .close() call, the issue goes away (so, with the added delay, the connection is not closed anymore).
Maybe a race condition?
Hey @JCKodel what version of Powersync are you using and which platform (iOS, Android, Mac, Windows, Linux) are you testing on?
powersync: ^1.10.0
iOS.
I am unable to reproduce this while running in debug mode and hot restarting multiple times using our todolist demo. Are you changing any properties or doing anything specific before hot restarting?
I can try and reproduce the exact scenario that might be causing this.
Not that I'm aware of.
I open the DB for each authenticated user:
Future<Result<void, OpenDBFailure>> open(String userId) async {
  await close();

  try {
    final dir = await PathProvider.getApplicationSupportDirectory();

    final freeSpace =
        (await DiskCapacity().getFreeDiskSpaceForPath(dir.path)) *
            1024 *
            1024;

    final path = Path.join(dir.path, "${userId}.db");
    final dbFile = File(path);
    final dbSize = dbFile.existsSync() ? dbFile.lengthSync() : 1048576;

    if (freeSpace < dbSize) {
      final message = "Not enough disk space for database "
          "(required: ${dbSize}, available: ${freeSpace})";

      _analyticsProvider.logError<Database>(
        message,
        Exception(message),
        StackTrace.current,
        true,
      );

      return Result.failure(OpenDBFailure.notEnoughSpace);
    }

    _analyticsProvider.logInfo<Database>("Opening database");

    debugPrint(
      "Path: ${path}, free space: ${freeSpace}, db size: ${dbSize}",
    );

    _schema.validate();

    final db = PowerSyncDatabase(schema: _schema, path: path);

    await db.initialize();
    await db.refreshSchema();

    _db = db;

    return Result.success(null);
  } catch (exception, stackTrace) {
    _analyticsProvider.logError<Database>(
      "Opening database",
      exception,
      stackTrace,
      true,
    );

    return Result.failure(OpenDBFailure.unknown);
  }
}

Future<void> close() async {
  if (_db == null) {
    return;
  }

  if (_db!.closed) {
    return;
  }

  if (_db!.connected) {
    _analyticsProvider.logInfo<Database>("Disconnecting database");
    await _db!.disconnect();
  }

  _analyticsProvider.logInfo<Database>("Closing database");
  await _db!.close();
}

Future<void> startSync() async {
  final dbIsReady =
      _db != null && _db!.closed == false && _db!.connected == false;

  if (dbIsReady == false) {
    return;
  }

  _analyticsProvider.logInfo<Database>("Connecting database");

  await _db!.connect(
    connector: _BackendConnector(
      _db!,
      _authProvider,
      _httpProvider,
      _analyticsProvider,
    ),
  );

  debugPrint(_db!.connected.toString());

  _analyticsProvider.logInfo<Database>("Waiting for first sync");

  if (_db!.connected) {
    await _db!.waitForFirstSync();
  }
}
Then I use this connector:
final class _BackendConnector extends PowerSyncBackendConnector {
  _BackendConnector(
    this.db,
    this.authProvider,
    this.httpProvider,
    this.analyticsProvider,
  );

  final PowerSyncDatabase db;
  final IAuthProvider authProvider;
  final IHttpProvider httpProvider;
  final IAnalyticsProvider analyticsProvider;

  @override
  Future<PowerSyncCredentials?> fetchCredentials() async {
    final result = await authProvider.getAccessToken();

    if (result.isFailure) {
      debugPrint("Failure getting access token: ${result}");
      return null;
    }

    debugPrint(result.value.token);

    return PowerSyncCredentials(
      endpoint: _psUrl,
      token: result.value.token,
      userId: result.value.userId,
      expiresAt: result.value.expiresAt,
    );
  }

  @override
  Future<void> uploadData(PowerSyncDatabase database) async {
    final credentials = await getCredentialsCached();

    if (credentials == null) {
      debugPrint("Disconnecting because credentials are not available");
      await db.disconnect();
      return;
    }

    final batch = await db.getCrudBatch();

    if (batch == null) {
      debugPrint("Nothing to sync");
      return;
    }

    final rows = <String, CrudEntry>{};

    for (final crud in batch.crud) {
      rows[crud.table] = crud;
    }

    final graphql = StringBuffer();

    for (final crud in rows.values) {
      switch (crud.op) {
        case UpdateType.put:
          _addInsertOp(graphql, crud);
        case UpdateType.patch:
          _addUpdateOp(graphql, crud);
        case UpdateType.delete:
          _addDeleteOp(graphql, crud);
      }
    }

    if (graphql.isEmpty) {
      debugPrint("Nothing to sync (2)");
      await batch.complete();
      return;
    }

    final query = "mutation UploadData{${graphql}}";

    final body = jsonEncode(
      {
        "query": query,
        "variables": null,
        "operationName": "UploadData",
      },
    );

    final request = HttpRequest(
      method: HttpMethod.post,
      url: "${_geUrl}/v1/graphql",
      headers: {
        "Content-Type": "application/json; charset=utf-8",
        "Authorization": "Bearer ${credentials.token}",
      },
      bodyBytes: utf8.encode(body),
    );

    debugPrint(query);

    final response = await httpProvider.sendRequest(request);

    if (response.isFailure) {
      debugPrint(response.failure.toString());
      return;
    }

    if (response.value.statusCode == 401) {
      debugPrint("InvalidCredentials");
      invalidateCredentials();
      return;
    }

    if (response.value.statusCode != 200) {
      analyticsProvider.logError<_BackendConnector>(
        "Invalid response ${response.value.statusCode}",
        HttpException("Invalid response ${response.value.statusCode}"),
        StackTrace.current,
        false,
      );

      return;
    }

    final data = await response.value.getMap();
    final errors = data["errors"] as List?;

    if (errors == null) {
      await batch.complete();
      return;
    } else {
      final errorMessages = errors
          .map(
            // ignore: avoid_dynamic_calls
            (e) => "${e["message"]} (${e["extensions"]})",
          )
          .toList();

      final errorMessage = errorMessages.join("\n");
      final exception = Exception(errorMessage);

      analyticsProvider.logError<_BackendConnector>(
        errorMessage,
        exception,
        StackTrace.current,
        true,
      );
    }
  }

  void _addInsertOp(StringBuffer graphql, CrudEntry crud) {
    graphql.write(
      "insert_${crud.table}_one("
      // ignore: missing_whitespace_between_adjacent_strings
      "object: {"
      "id:${jsonEncode(crud.id)},"
      "${crud.opData!.entries.where((entry) => entry.key != "created_at").map(
            (entry) => "${entry.key}:${jsonEncode(entry.value)}",
          ).join(",")}"
      "}on_conflict:{constraint:${crud.table}_pkey,update_columns:["
      "${crud.opData!.keys.join(",")}"
      "]}"
      "){__typename}",
    );
  }

  void _addUpdateOp(StringBuffer graphql, CrudEntry crud) {
    if (crud.opData == null || crud.opData!.isEmpty) {
      return;
    }

    graphql.write(
      "update_${crud.table}_by_pk("
      "pk_columns:{id:${jsonEncode(crud.id)}},"
      "_set:{"
      "${crud.opData!.entries.where((entry) => entry.key != "created_at").map(
            (entry) => "${entry.key}:${jsonEncode(entry.value)}",
          ).join(",")}"
      "}){__typename}",
    );
  }

  void _addDeleteOp(StringBuffer graphql, CrudEntry crud) {
    graphql.write(
      "delete_${crud.table}(where:{id:{_eq:"
      "${jsonEncode(crud.id)}}}){affected_rows}",
    );
  }
}
Every single time I hot restart, the exception is thrown.
Hey @JCKodel, sorry for getting back so late. It took me quite a while to recreate this; it seems this error occurs when there is a dangling database reference that hasn't been properly closed but is closed again when hot restarting.
In our demo, we create a global reference to the db, which avoids this issue. However, in your example, the db is created differently, leading to this behavior.
To help you test and reproduce the issue:
- Ensure that the db reference is consistently handled in your code.
- Try modifying your implementation to use a global db reference, similar to the approach in our demo (see the sketch below).
- Check for any unclosed database connections or resources in your example code.
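For reference, the global-reference pattern looks roughly like this. It is only a minimal sketch: the openDatabase/closeDatabase names, the top-level db variable, and the file name are illustrative, not copied from the demo.

// Minimal sketch of a single, global database reference (names are illustrative).
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:powersync/powersync.dart';

PowerSyncDatabase? db; // one top-level instance, reused everywhere

Future<void> openDatabase(Schema schema) async {
  // Reuse the existing instance instead of creating a new one per call,
  // so a hot restart never leaves a second, dangling database behind.
  if (db != null && !db!.closed) {
    return;
  }

  final dir = await getApplicationSupportDirectory();

  db = PowerSyncDatabase(schema: schema, path: p.join(dir.path, "app.db"));
  await db!.initialize();
}

Future<void> closeDatabase() async {
  if (db == null || db!.closed) {
    return;
  }

  await db!.disconnect();
  await db!.close();

  db = null; // drop the reference so nothing can try to close it again
}

With a single reference like this, open can be called repeatedly (including after a hot restart) without a second PowerSyncDatabase ever being constructed for the same file and then closed twice.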