Skip to content
This repository was archived by the owner on Apr 26, 2024. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion changelog.d/7736.feature

This file was deleted.

1 change: 1 addition & 0 deletions changelog.d/8039.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Revert MSC2654 implementation because of perf issues. Please delete this line when processing the 1.19 changelog.
2 changes: 1 addition & 1 deletion scripts/synapse_port_db
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ logger = logging.getLogger("synapse_port_db")


BOOLEAN_COLUMNS = {
"events": ["processed", "outlier", "contains_url", "count_as_unread"],
"events": ["processed", "outlier", "contains_url"],
"rooms": ["is_public"],
"event_edges": ["is_state"],
"presence_list": ["accepted"],
Expand Down
6 changes: 0 additions & 6 deletions synapse/handlers/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,6 @@ class JoinedSyncResult:
account_data = attr.ib(type=List[JsonDict])
unread_notifications = attr.ib(type=JsonDict)
summary = attr.ib(type=Optional[JsonDict])
unread_count = attr.ib(type=int)

def __nonzero__(self) -> bool:
"""Make the result appear empty if there are no updates. This is used
Expand Down Expand Up @@ -1887,10 +1886,6 @@ async def _generate_room_entry(

if room_builder.rtype == "joined":
unread_notifications = {} # type: Dict[str, str]

unread_count = await self.store.get_unread_message_count_for_user(
room_id, sync_config.user.to_string(),
)
room_sync = JoinedSyncResult(
room_id=room_id,
timeline=batch,
Expand All @@ -1899,7 +1894,6 @@ async def _generate_room_entry(
account_data=account_data_events,
unread_notifications=unread_notifications,
summary=summary,
unread_count=unread_count,
)

if room_sync or always_include:
Expand Down
17 changes: 13 additions & 4 deletions synapse/push/push_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,22 @@ async def get_badge_count(store, user_id):
invites = await store.get_invited_rooms_for_local_user(user_id)
joins = await store.get_rooms_for_user(user_id)

my_receipts_by_room = await store.get_receipts_for_user(user_id, "m.read")

badge = len(invites)

for room_id in joins:
unread_count = await store.get_unread_message_count_for_user(room_id, user_id)
# return one badge count per conversation, as count per
# message is so noisy as to be almost useless
badge += 1 if unread_count else 0
if room_id in my_receipts_by_room:
last_unread_event_id = my_receipts_by_room[room_id]

notifs = await (
store.get_unread_event_push_actions_by_room_for_user(
room_id, user_id, last_unread_event_id
)
)
# return one badge count per conversation, as count per
# message is so noisy as to be almost useless
badge += 1 if notifs["notify_count"] else 0
return badge


Expand Down
1 change: 0 additions & 1 deletion synapse/rest/client/v2_alpha/sync.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,6 @@ def serialize(events):
result["ephemeral"] = {"events": ephemeral_events}
result["unread_notifications"] = room.unread_notifications
result["summary"] = room.summary
result["org.matrix.msc2654.unread_count"] = room.unread_count

return result

Expand Down
1 change: 0 additions & 1 deletion synapse/storage/databases/main/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,6 @@ def _invalidate_caches_for_event(

self.get_latest_event_ids_in_room.invalidate((room_id,))

self.get_unread_message_count_for_user.invalidate_many((room_id,))
self.get_unread_event_push_actions_by_room_for_user.invalidate_many((room_id,))

if not backfilled:
Expand Down
48 changes: 1 addition & 47 deletions synapse/storage/databases/main/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,47 +53,6 @@
["type", "origin_type", "origin_entity"],
)

# State event types that should bump the unread count when they change.
STATE_EVENT_TYPES_TO_MARK_UNREAD = {
    EventTypes.Topic,
    EventTypes.Name,
    EventTypes.RoomAvatar,
    EventTypes.Tombstone,
}


def should_count_as_unread(event: EventBase, context: EventContext) -> bool:
    """Decide whether *event* should increment a user's unread-message count.

    Args:
        event: The event being persisted.
        context: The persistence context for the event (used to check rejection).

    Returns:
        True if the event should be counted as unread, False otherwise.
    """
    # Rejected and soft-failed events are invisible to clients; never count them.
    if context.rejected or event.internal_metadata.is_soft_failed():
        return False

    is_state = event.is_state()

    # Server notices (m.notice messages) are deliberately low-priority.
    if (
        not is_state
        and event.type == EventTypes.Message
        and event.content.get("msgtype") == "m.notice"
    ):
        return False

    # Edits replace an earlier message rather than adding a new one.
    if event.content.get("m.relates_to", {}).get("rel_type") == RelationTypes.REPLACE:
        return False

    # Any event carrying a non-empty string body reads as a message.
    body = event.content.get("body")
    if isinstance(body, str) and body:
        return True

    # Otherwise: a select few state events count; everything else state-ful doesn't.
    if is_state:
        return event.type in STATE_EVENT_TYPES_TO_MARK_UNREAD

    # Encrypted payloads can't be inspected, so count them to be safe.
    return event.type == EventTypes.Encrypted


def encode_json(json_object):
"""
Expand Down Expand Up @@ -239,10 +198,6 @@ def _persist_events_and_state_updates(

event_counter.labels(event.type, origin_type, origin_entity).inc()

self.store.get_unread_message_count_for_user.invalidate_many(
(event.room_id,),
)

for room_id, new_state in current_state_for_room.items():
self.store.get_current_state_ids.prefill((room_id,), new_state)

Expand Down Expand Up @@ -864,9 +819,8 @@ def event_dict(event):
"contains_url": (
"url" in event.content and isinstance(event.content["url"], str)
),
"count_as_unread": should_count_as_unread(event, context),
}
for event, context in events_and_contexts
for event, _ in events_and_contexts
],
)

Expand Down
86 changes: 1 addition & 85 deletions synapse/storage/databases/main/events_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,9 @@
from synapse.replication.tcp.streams.events import EventsStream
from synapse.storage._base import SQLBaseStore, db_to_json, make_in_list_sql_clause
from synapse.storage.database import DatabasePool
from synapse.storage.types import Cursor
from synapse.storage.util.id_generators import StreamIdGenerator
from synapse.types import get_domain_from_id
from synapse.util.caches.descriptors import (
Cache,
_CacheContext,
cached,
cachedInlineCallbacks,
)
from synapse.util.caches.descriptors import Cache, cached, cachedInlineCallbacks
from synapse.util.iterutils import batch_iter
from synapse.util.metrics import Measure

Expand Down Expand Up @@ -1364,84 +1358,6 @@ def get_next_event_to_expire_txn(txn):
desc="get_next_event_to_expire", func=get_next_event_to_expire_txn
)

@cached(tree=True, cache_context=True)
async def get_unread_message_count_for_user(
    self, room_id: str, user_id: str, cache_context: _CacheContext,
) -> int:
    """Retrieve the count of unread messages for the given room and user.

    The result is cached (tree=True so it can be invalidated per-room); the
    nested receipt lookup registers ``cache_context.invalidate`` so that a
    new read receipt invalidates this entry too.

    Args:
        room_id: The ID of the room to count unread messages in.
        user_id: The ID of the user to count unread messages for.

    Returns:
        The number of unread messages for the given user in the given room.
    """
    # Measure wraps the whole lookup so the receipt fetch and the DB
    # transaction are both attributed to this metric block.
    with Measure(self._clock, "get_unread_message_count_for_user"):
        # Find the user's latest m.read receipt in this room (may be None
        # if they have never read anything here).
        last_read_event_id = await self.get_last_receipt_event_id_for_user(
            user_id=user_id,
            room_id=room_id,
            receipt_type="m.read",
            on_invalidate=cache_context.invalidate,
        )

        # Count everything after that receipt inside a single transaction.
        return await self.db_pool.runInteraction(
            "get_unread_message_count_for_user",
            self._get_unread_message_count_for_user_txn,
            user_id,
            room_id,
            last_read_event_id,
        )

def _get_unread_message_count_for_user_txn(
    self,
    txn: Cursor,
    user_id: str,
    room_id: str,
    last_read_event_id: Optional[str],
) -> int:
    """Transaction body for :meth:`get_unread_message_count_for_user`.

    Args:
        txn: The database cursor for this transaction.
        user_id: The user to count unread messages for.
        room_id: The room to count unread messages in.
        last_read_event_id: Event ID of the user's latest m.read receipt in
            this room, or None if they have none.

    Returns:
        The number of events after the read marker (or after the user's join,
        if there is no read marker) that are flagged ``count_as_unread``.
    """
    if last_read_event_id:
        # Get the stream ordering for the last read event.
        stream_ordering = self.db_pool.simple_select_one_onecol_txn(
            txn=txn,
            table="events",
            keyvalues={"room_id": room_id, "event_id": last_read_event_id},
            retcol="stream_ordering",
        )
    else:
        # If there's no read receipt for that room, it probably means the user hasn't
        # opened it yet, in which case use the stream ID of their join event.
        # We can't just set it to 0 otherwise messages from other local users from
        # before this user joined will be counted as well.
        txn.execute(
            """
            SELECT stream_ordering FROM local_current_membership
            LEFT JOIN events USING (event_id, room_id)
            WHERE membership = 'join'
                AND user_id = ?
                AND room_id = ?
            """,
            (user_id, room_id),
        )
        row = txn.fetchone()

        # No join row either: the user isn't in the room, nothing is unread.
        if row is None:
            return 0

        stream_ordering = row[0]

    # Count the messages that qualify as unread after the stream ordering we've just
    # retrieved. Events from the user themselves are excluded via ``sender != ?``.
    sql = """
        SELECT COUNT(*) FROM events
        WHERE sender != ? AND room_id = ? AND stream_ordering > ? AND count_as_unread
    """

    txn.execute(sql, (user_id, room_id, stream_ordering))
    row = txn.fetchone()

    # COUNT(*) always yields one row, but guard against a None fetch anyway.
    return row[0] if row else 0


AllNewEventsResult = namedtuple(
"AllNewEventsResult",
Expand Down

This file was deleted.

20 changes: 0 additions & 20 deletions tests/rest/client/v1/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -165,26 +165,6 @@ def send_event(

return channel.json_body

def redact(self, room_id, event_id, txn_id=None, tok=None, expect_code=200):
    """Send a redaction for *event_id* in *room_id* and assert the HTTP status.

    Args:
        room_id: The room containing the event to redact.
        event_id: The event to redact.
        txn_id: Client transaction ID; a time-based one is generated if None.
        tok: Access token to append to the request path, if any.
        expect_code: HTTP status code the response is asserted to carry.

    Returns:
        The JSON body of the response channel.
    """
    # Derive a (roughly) unique transaction ID from the clock when none given.
    txn_id = txn_id if txn_id is not None else "m%s" % (str(time.time()))

    path = "/_matrix/client/r0/rooms/%s/redact/%s/%s" % (room_id, event_id, txn_id)
    if tok:
        path = path + "?access_token=%s" % tok

    # PUT an empty JSON object to the redact endpoint and drive the request
    # through the test resource.
    request, channel = make_request(
        self.hs.get_reactor(), "PUT", path, json.dumps({}).encode("utf8")
    )
    render(request, self.resource, self.hs.get_reactor())

    status = int(channel.result["code"])
    assert status == expect_code, (
        "Expected: %d, got: %d, resp: %r"
        % (expect_code, status, channel.result["body"])
    )

    return channel.json_body

def _read_write_state(
self,
room_id: str,
Expand Down
Loading