famedlysdk/lib/src/timeline.dart

290 lines
10 KiB
Dart
Raw Normal View History

2019-06-21 10:18:54 +00:00
/*
2020-06-03 10:16:01 +00:00
* Famedly Matrix SDK
* Copyright (C) 2019, 2020 Famedly GmbH
2019-06-21 10:18:54 +00:00
*
2020-06-03 10:16:01 +00:00
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
2019-06-21 10:18:54 +00:00
*
2020-06-03 10:16:01 +00:00
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
2019-06-21 10:18:54 +00:00
*
2020-06-03 10:16:01 +00:00
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
2019-06-21 10:18:54 +00:00
*/
import 'dart:async';
import '../matrix_api.dart';
import 'event.dart';
import 'room.dart';
2020-06-03 10:16:01 +00:00
import 'utils/event_update.dart';
import 'utils/logs.dart';
2020-06-03 10:16:01 +00:00
import 'utils/room_update.dart';
2019-06-21 10:18:54 +00:00
/// Signature of the callback fired whenever the timeline changed in any way
/// (events added, removed, redacted, decrypted or re-sorted).
typedef onTimelineUpdateCallback = void Function();

/// Signature of the callback fired when a new event was inserted into the
/// timeline at index [insertID].
typedef onTimelineInsertCallback = void Function(int insertID);
2019-06-21 10:18:54 +00:00
/// Represents the timeline of a room. The callbacks [onUpdate], [onDelete],
/// [onInsert] and [onResort] will be triggered automatically. The initial
/// event list will be retreived when created by the [room.getTimeline] method.
/// Represents the timeline of a room. The callbacks [onUpdate], [onDelete],
/// [onInsert] and [onResort] will be triggered automatically. The initial
/// event list will be retreived when created by the [room.getTimeline] method.
class Timeline {
  /// The room this timeline belongs to.
  final Room room;

  /// The events of this timeline. Kept sorted by [_sort]: still-sending
  /// events (status == -1) first, then by [Event.sortOrder] descending.
  List<Event> events = [];

  /// Map of event ID to map of type to set of aggregated events
  Map<String, Map<String, Set<Event>>> aggregatedEvents = {};

  /// Called after any change to [events]. May be null.
  final onTimelineUpdateCallback onUpdate;

  /// Called with the insertion index when a new event is added. May be null.
  final onTimelineInsertCallback onInsert;

  StreamSubscription<EventUpdate> sub;
  StreamSubscription<RoomUpdate> roomSub;
  StreamSubscription<String> sessionIdReceivedSub;

  // Guards against concurrent requestHistory() calls.
  bool _requestingHistoryLock = false;

  // Cache of events fetched individually from the server by getEventById().
  final Map<String, Event> _eventCache = {};

  /// Searches for the event in this timeline. If not
  /// found, requests from the server. Requested events
  /// are cached.
  ///
  /// Returns null if the event could not be found anywhere.
  Future<Event> getEventById(String id) async {
    for (var i = 0; i < events.length; i++) {
      if (events[i].eventId == id) return events[i];
    }
    if (_eventCache.containsKey(id)) return _eventCache[id];
    final requestedEvent = await room.getEventById(id);
    if (requestedEvent == null) return null;
    _eventCache[id] = requestedEvent;
    return _eventCache[id];
  }

  /// Requests [historyCount] older events for this room from the server.
  /// Calls made while a request is already running are silently ignored.
  Future<void> requestHistory(
      {int historyCount = Room.DefaultHistoryCount}) async {
    if (!_requestingHistoryLock) {
      _requestingHistoryLock = true;
      await room.requestHistory(
        historyCount: historyCount,
        onHistoryReceived: () {
          // If the server reports no more history, reset the local timeline.
          // The null check must run BEFORE .isEmpty, otherwise a null
          // prev_batch throws a NoSuchMethodError before the guard applies.
          if (room.prev_batch == null || room.prev_batch.isEmpty) {
            events.clear();
            aggregatedEvents.clear();
          }
        },
      );
      // Give the incoming history events time to arrive via the onEvent
      // stream before allowing the next history request.
      await Future.delayed(const Duration(seconds: 2));
      _requestingHistoryLock = false;
    }
  }

  Timeline({this.room, this.events, this.onUpdate, this.onInsert}) {
    sub ??= room.client.onEvent.stream.listen(_handleEventUpdate);
    // if the timeline is limited we want to clear our events cache
    // as r.limitedTimeline can be "null" sometimes, we need to check for == true
    // as after receiving a limited timeline room update new events are expected
    // to be received via the onEvent stream, it is unneeded to call sortAndUpdate
    roomSub ??= room.client.onRoomUpdate.stream
        .where((r) => r.id == room.id && r.limitedTimeline == true)
        .listen((r) {
      events.clear();
      aggregatedEvents.clear();
    });
    sessionIdReceivedSub ??=
        room.onSessionKeyReceived.stream.listen(_sessionKeyReceived);

    // we want to populate our aggregated events
    for (final e in events) {
      addAggregatedEvent(e);
    }
    _sort();
  }

  /// Don't forget to call this before you dismiss this object!
  void cancelSubscriptions() {
    sub?.cancel();
    roomSub?.cancel();
    sessionIdReceivedSub?.cancel();
  }

  /// Tries to decrypt all still-encrypted events that were encrypted with
  /// [sessionId], now that its key has been received.
  void _sessionKeyReceived(String sessionId) async {
    var decryptAtLeastOneEvent = false;
    final decryptFn = () async {
      if (!room.client.encryptionEnabled) {
        return;
      }
      for (var i = 0; i < events.length; i++) {
        if (events[i].type == EventTypes.Encrypted &&
            events[i].messageType == MessageTypes.BadEncrypted &&
            events[i].content['session_id'] == sessionId) {
          events[i] = await room.client.encryption
              .decryptRoomEvent(room.id, events[i], store: true);
          if (events[i].type != EventTypes.Encrypted) {
            decryptAtLeastOneEvent = true;
          }
        }
      }
    };
    // Run inside a database transaction if possible so all decrypted
    // events are persisted atomically.
    if (room.client.database != null) {
      await room.client.database.transaction(decryptFn);
    } else {
      await decryptFn();
    }
    // Guard against a null callback, consistent with every other call site.
    if (decryptAtLeastOneEvent && onUpdate != null) onUpdate();
  }

  /// Returns the index of the first event matching [event_id] or
  /// [unsigned_txid] by either its event ID or its transaction ID, or
  /// [events.length] if no match was found.
  int _findEvent({String event_id, String unsigned_txid}) {
    // we want to find any existing event where either the passed event_id or the passed unsigned_txid
    // matches either the event_id or transaction_id of the existing event.
    // For that we create two sets, searchNeedle, what we search, and searchHaystack, where we check if there is a match.
    // Now, after having these two sets, if the intersect between them is non-empty, we know that we have at least one match in one pair,
    // thus meaning we found our element.
    final searchNeedle = <String>{};
    if (event_id != null) {
      searchNeedle.add(event_id);
    }
    if (unsigned_txid != null) {
      searchNeedle.add(unsigned_txid);
    }
    int i;
    for (i = 0; i < events.length; i++) {
      final searchHaystack = <String>{};
      if (events[i].eventId != null) {
        searchHaystack.add(events[i].eventId);
      }
      if (events[i].unsigned != null &&
          events[i].unsigned['transaction_id'] != null) {
        searchHaystack.add(events[i].unsigned['transaction_id']);
      }
      if (searchNeedle.intersection(searchHaystack).isNotEmpty) {
        break;
      }
    }
    return i;
  }

  /// Removes every event from [eventSet] that matches [event]'s event ID
  /// or transaction ID.
  void _removeEventFromSet(Set<Event> eventSet, Event event) {
    eventSet.removeWhere((e) =>
        e.matchesEventOrTransactionId(event.eventId) ||
        (event.unsigned != null &&
            e.matchesEventOrTransactionId(event.unsigned['transaction_id'])));
  }

  /// Registers [event] in the [aggregatedEvents] tree, replacing any older
  /// version of the same event (matched by event or transaction ID).
  void addAggregatedEvent(Event event) {
    // we want to add an event to the aggregation tree
    if (event.relationshipType == null || event.relationshipEventId == null) {
      return; // nothing to do
    }
    if (!aggregatedEvents.containsKey(event.relationshipEventId)) {
      aggregatedEvents[event.relationshipEventId] = <String, Set<Event>>{};
    }
    if (!aggregatedEvents[event.relationshipEventId]
        .containsKey(event.relationshipType)) {
      aggregatedEvents[event.relationshipEventId]
          [event.relationshipType] = <Event>{};
    }
    // remove a potential old event
    _removeEventFromSet(
        aggregatedEvents[event.relationshipEventId][event.relationshipType],
        event);
    // add the new one
    aggregatedEvents[event.relationshipEventId][event.relationshipType]
        .add(event);
  }

  /// Removes [event] from the [aggregatedEvents] tree: both the entries it
  /// aggregated onto others and the aggregations attached to it.
  void removeAggregatedEvent(Event event) {
    aggregatedEvents.remove(event.eventId);
    if (event.unsigned != null) {
      aggregatedEvents.remove(event.unsigned['transaction_id']);
    }
    for (final types in aggregatedEvents.values) {
      for (final events in types.values) {
        _removeEventFromSet(events, event);
      }
    }
  }

  /// Handles an incoming [eventUpdate] from the client's onEvent stream:
  /// applies redactions, removes events with status -2, updates existing
  /// events in place and inserts new ones at the top of the timeline.
  void _handleEventUpdate(EventUpdate eventUpdate) async {
    try {
      if (eventUpdate.roomID != room.id) return;

      if (eventUpdate.type == EventUpdateType.timeline ||
          eventUpdate.type == EventUpdateType.history) {
        // Sending status: explicit 'status' key wins, then the unsigned
        // sending-status key, defaulting to 2 (synced).
        var status = eventUpdate.content['status'] ??
            (eventUpdate.content['unsigned'] is Map<String, dynamic>
                ? eventUpdate.content['unsigned'][MessageSendingStatusKey]
                : null) ??
            2;

        // Redaction events are handled as modification for existing events.
        if (eventUpdate.eventType == EventTypes.Redaction) {
          final eventId = _findEvent(event_id: eventUpdate.content['redacts']);
          if (eventId < events.length) {
            removeAggregatedEvent(events[eventId]);
            events[eventId].setRedactionEvent(Event.fromJson(
                eventUpdate.content, room, eventUpdate.sortOrder));
          }
        } else if (status == -2) {
          // Status -2 means the event was removed (e.g. sending cancelled).
          var i = _findEvent(event_id: eventUpdate.content['event_id']);
          if (i < events.length) {
            removeAggregatedEvent(events[i]);
            events.removeAt(i);
          }
        } else {
          var i = _findEvent(
              event_id: eventUpdate.content['event_id'],
              unsigned_txid: eventUpdate.content['unsigned'] is Map
                  ? eventUpdate.content['unsigned']['transaction_id']
                  : null);

          if (i < events.length) {
            // if the old status is larger than the new one, we also want to preserve the old status
            final oldStatus = events[i].status;
            events[i] = Event.fromJson(
                eventUpdate.content, room, eventUpdate.sortOrder);
            // do we preserve the status? we should allow 0 -> -1 updates and status increases
            if (status < oldStatus && !(status == -1 && oldStatus == 0)) {
              events[i].status = oldStatus;
            }
            addAggregatedEvent(events[i]);
          } else {
            var newEvent = Event.fromJson(
                eventUpdate.content, room, eventUpdate.sortOrder);

            // Never insert a history event that is already in the timeline.
            if (eventUpdate.type == EventUpdateType.history &&
                events.indexWhere(
                        (e) => e.eventId == eventUpdate.content['event_id']) !=
                    -1) return;

            events.insert(0, newEvent);
            addAggregatedEvent(newEvent);
            if (onInsert != null) onInsert(0);
          }
        }
      }
      _sort();
      if (onUpdate != null) onUpdate();
    } catch (e, s) {
      Logs.warning('Handle event update failed: ${e.toString()}', s);
    }
  }

  // Prevents re-entrant sorting (the comparator may trigger updates).
  bool _sortLock = false;

  /// Sorts [events]: still-sending events (status == -1) first, then by
  /// [Event.sortOrder] descending (newest first).
  void _sort() {
    if (_sortLock || events.length < 2) return;
    _sortLock = true;
    events.sort((a, b) {
      if (b.status == -1 && a.status != -1) {
        return 1;
      }
      if (a.status == -1 && b.status != -1) {
        return -1;
      }
      return b.sortOrder - a.sortOrder > 0 ? 1 : -1;
    });
    _sortLock = false;
  }
}