This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit 5b0b103

Merge commit '31acc5c30' into anoa/dinsic_release_1_21_x

Commits brought in by '31acc5c30':

* Escape the error description on the sso_error template. (#8405)
* Fix occasional "Re-starting finished log context" from keyring (#8398)
* Allow existing users to login via OpenID Connect. (#8345)
* Fix schema delta for servers that have not backfilled (#8396)
* Fix MultiWriteIdGenerator's handling of restarts. (#8374)
* s/URLs/variables in changelog
* s/accidentally/incorrectly in changelog
* Update changelog wording
* Add type annotations to SimpleHttpClient (#8372)
* Add new sequences to port DB script (#8387)
* Add EventStreamPosition type (#8388)
* Mark the shadow_banned column as boolean in synapse_port_db. (#8386)

2 parents: d70f909 + 31acc5c

40 files changed: +731, -264 lines

CHANGES.md (+1, -1)

@@ -5,7 +5,7 @@ Bugfixes
 --------
 
 - Fix a bug introduced in v1.20.0 which caused the `synapse_port_db` script to fail. ([\#8386](https://github.com/matrix-org/synapse/issues/8386))
-- Fix URLs being accidentally escaped in Jinja2 templates. Broke in v1.20.0. ([\#8394](https://github.com/matrix-org/synapse/issues/8394))
+- Fix a bug introduced in v1.20.0 which caused variables to be incorrectly escaped in Jinja2 templates. ([\#8394](https://github.com/matrix-org/synapse/issues/8394))
 
 
 Synapse 1.20.0 (2020-09-22)

changelog.d/8345.feature (new file, +1)
+Add a configuration option that allows existing users to log in with OpenID Connect. Contributed by @BBBSnowball and @OmmyZhang.

changelog.d/8372.misc (new file, +1)
+Add type annotations to `SimpleHttpClient`.

changelog.d/8374.bugfix (new file, +1)
+Fix theoretical race condition where events are not sent down `/sync` if the synchrotron worker is restarted without restarting other workers.

changelog.d/8386.bugfix (new file, +1)
+Fix a bug introduced in v1.20.0 which caused the `synapse_port_db` script to fail.

changelog.d/8387.feature (new file, +1)
+Add experimental support for sharding event persister.

changelog.d/8388.misc (new file, +1)
+Add `EventStreamPosition` type.

changelog.d/8396.feature (new file, +1)
+Add experimental support for sharding event persister.

changelog.d/8398.bugfix (new file, +1)
+Fix "Re-starting finished log context" warning when receiving an event we already had over federation.

changelog.d/8405.feature (new file, +1)
+Consolidate the SSO error template across all configuration.

docs/sample_config.yaml (+5)

@@ -1864,6 +1864,11 @@ oidc_config:
 #
 #skip_verification: true
 
+# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
+# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
+#
+#allow_existing_users: true
+
 # An external module can be provided here as a custom solution to mapping
 # attributes returned from a OIDC provider onto a matrix user.
 #

scripts/synapse_port_db (+24)

@@ -630,6 +630,7 @@ class Porter(object):
             self.progress.set_state("Setting up sequence generators")
             await self._setup_state_group_id_seq()
             await self._setup_user_id_seq()
+            await self._setup_events_stream_seqs()
 
             self.progress.done()
         except Exception as e:

@@ -806,6 +807,29 @@ class Porter(object):
 
         return self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)
 
+    def _setup_events_stream_seqs(self):
+        def r(txn):
+            txn.execute("SELECT MAX(stream_ordering) FROM events")
+            curr_id = txn.fetchone()[0]
+            if curr_id:
+                next_id = curr_id + 1
+                txn.execute(
+                    "ALTER SEQUENCE events_stream_seq RESTART WITH %s", (next_id,)
+                )
+
+            txn.execute("SELECT -MIN(stream_ordering) FROM events")
+            curr_id = txn.fetchone()[0]
+            if curr_id:
+                next_id = curr_id + 1
+                txn.execute(
+                    "ALTER SEQUENCE events_backfill_stream_seq RESTART WITH %s",
+                    (next_id,),
+                )
+
+        return self.postgres_store.db_pool.runInteraction(
+            "_setup_events_stream_seqs", r
+        )
+
 
 ##############################################
 # The following is simply UI stuff
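
For context on the new `_setup_events_stream_seqs` step: after copying rows into Postgres, the port script has to move each sequence past the ids already in use, otherwise the first insert on the ported database would collide with an existing row. Forward-persisted events use positive `stream_ordering` values (hence `MAX`), while backfilled events are numbered downwards through negative values (hence `-MIN`). A minimal standalone sketch of the same idea, using psycopg2 directly and illustrative table/sequence names rather than the Porter machinery:

    import psycopg2

    def bump_sequence(conn, table: str, column: str, sequence: str) -> None:
        """Restart `sequence` one past the largest value already present in `column`."""
        with conn.cursor() as cur:
            cur.execute("SELECT MAX({}) FROM {}".format(column, table))
            current = cur.fetchone()[0]
            if current:
                # psycopg2 substitutes the parameter client-side, so a bound
                # value works even in this ALTER SEQUENCE statement.
                cur.execute(
                    "ALTER SEQUENCE {} RESTART WITH %s".format(sequence),
                    (current + 1,),
                )

    conn = psycopg2.connect("dbname=synapse_ported")  # illustrative DSN
    bump_sequence(conn, "events", "stream_ordering", "events_stream_seq")
    conn.commit()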

synapse/appservice/api.py (+1, -1)

@@ -178,7 +178,7 @@ async def _get() -> Optional[JsonDict]:
                 urllib.parse.quote(protocol),
             )
             try:
-                info = await self.get_json(uri, {})
+                info = await self.get_json(uri)
 
                 if not _is_valid_3pe_metadata(info):
                     logger.warning(
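
The dropped empty dict goes hand in hand with the `SimpleHttpClient` typing work (#8372): once the query-string argument has an optional default, call sites no longer need to pass a placeholder. A hedged sketch of the shape such a signature takes (the real one lives in synapse/http/client.py; the parameter names and types below are illustrative assumptions):

    from typing import Any, Dict, Optional

    QueryParams = Dict[str, Any]  # stand-in for the client's real query-parameter type

    class HttpClientSketch:
        async def get_json(self, uri: str, args: Optional[QueryParams] = None) -> Any:
            """GET `uri`, optionally appending `args` as a query string, and decode JSON."""
            ...

    # With an Optional default, both spellings are equivalent at the call site:
    #   await client.get_json(uri, {})
    #   await client.get_json(uri)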

synapse/config/oidc_config.py (+6)

@@ -56,6 +56,7 @@ def read_config(self, config, **kwargs):
         self.oidc_userinfo_endpoint = oidc_config.get("userinfo_endpoint")
         self.oidc_jwks_uri = oidc_config.get("jwks_uri")
         self.oidc_skip_verification = oidc_config.get("skip_verification", False)
+        self.oidc_allow_existing_users = oidc_config.get("allow_existing_users", False)
 
         ump_config = oidc_config.get("user_mapping_provider", {})
         ump_config.setdefault("module", DEFAULT_USER_MAPPING_PROVIDER)

@@ -158,6 +159,11 @@ def generate_config_section(self, config_dir_path, server_name, **kwargs):
 #
 #skip_verification: true
 
+# Uncomment to allow a user logging in via OIDC to match a pre-existing account instead
+# of failing. This could be used if switching from password logins to OIDC. Defaults to false.
+#
+#allow_existing_users: true
+
 # An external module can be provided here as a custom solution to mapping
 # attributes returned from a OIDC provider onto a matrix user.
 #
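
The new option is read with a `False` default, so configurations that do not mention it keep the old behaviour. A small self-contained sketch of how the parsed YAML maps onto the flag (the snippet is illustrative, not a full homeserver.yaml):

    import yaml

    raw = yaml.safe_load(
        """
    oidc_config:
      enabled: true
      allow_existing_users: true
    """
    )

    oidc_config = raw.get("oidc_config", {})
    # Mirrors the read_config() line in the diff above: absent key -> False.
    allow_existing_users = oidc_config.get("allow_existing_users", False)
    print(allow_existing_users)  # True here; False when the key is omitted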

synapse/config/saml2_config.py (-6)

@@ -169,12 +169,6 @@ def read_config(self, config, **kwargs):
             saml2_config.get("saml_session_lifetime", "15m")
         )
 
-        # We enable autoescape here as the message may potentially come from a
-        # remote resource
-        self.saml2_error_html_template = self.read_templates(
-            ["saml_error.html"], saml2_config.get("template_dir"), autoescape=True
-        )[0]
-
     def _default_saml_config_dict(
         self, required_attributes: set, optional_attributes: set
     ):
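
The per-config autoescaped SAML error template can go away because the SSO error page is now shared and escapes the error description itself (#8405); the changelog entry at the top also notes that globally autoescaping templates had incorrectly escaped ordinary variables such as URLs (#8394). A minimal Jinja2 sketch, unrelated to Synapse's actual templates, showing why the distinction matters:

    from jinja2 import Environment

    template = "<p>{{ msg }}</p>"
    msg = 'see <a href="https://example.com/?a=1&b=2">details</a>'

    plain = Environment()                   # variables rendered verbatim
    escaped = Environment(autoescape=True)  # untrusted input neutralised, but markup and URLs are mangled too

    print(plain.from_string(template).render(msg=msg))
    print(escaped.from_string(template).render(msg=msg))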

synapse/crypto/keyring.py (+44, -26)

@@ -42,7 +42,6 @@
 )
 from synapse.logging.context import (
     PreserveLoggingContext,
-    current_context,
     make_deferred_yieldable,
     preserve_fn,
     run_in_background,

@@ -233,8 +232,6 @@ async def _start_key_lookups(self, verify_requests):
         """
 
         try:
-            ctx = current_context()
-
             # map from server name to a set of outstanding request ids
             server_to_request_ids = {}
 

@@ -265,12 +262,8 @@ def lookup_done(res, verify_request):
 
                 # if there are no more requests for this server, we can drop the lock.
                 if not server_requests:
-                    with PreserveLoggingContext(ctx):
-                        logger.debug("Releasing key lookup lock on %s", server_name)
-
-                    # ... but not immediately, as that can cause stack explosions if
-                    # we get a long queue of lookups.
-                    self.clock.call_later(0, drop_server_lock, server_name)
+                    logger.debug("Releasing key lookup lock on %s", server_name)
+                    drop_server_lock(server_name)
 
                 return res
 

@@ -335,20 +328,32 @@ async def do_iterations():
                         )
 
                 # look for any requests which weren't satisfied
-                with PreserveLoggingContext():
-                    for verify_request in remaining_requests:
-                        verify_request.key_ready.errback(
-                            SynapseError(
-                                401,
-                                "No key for %s with ids in %s (min_validity %i)"
-                                % (
-                                    verify_request.server_name,
-                                    verify_request.key_ids,
-                                    verify_request.minimum_valid_until_ts,
-                                ),
-                                Codes.UNAUTHORIZED,
-                            )
+                while remaining_requests:
+                    verify_request = remaining_requests.pop()
+                    rq_str = (
+                        "VerifyJsonRequest(server=%s, key_ids=%s, min_valid=%i)"
+                        % (
+                            verify_request.server_name,
+                            verify_request.key_ids,
+                            verify_request.minimum_valid_until_ts,
                         )
+                    )
+
+                    # If we run the errback immediately, it may cancel our
+                    # loggingcontext while we are still in it, so instead we
+                    # schedule it for the next time round the reactor.
+                    #
+                    # (this also ensures that we don't get a stack overflow if we
+                    # has a massive queue of lookups waiting for this server).
+                    self.clock.call_later(
+                        0,
+                        verify_request.key_ready.errback,
+                        SynapseError(
+                            401,
+                            "Failed to find any key to satisfy %s" % (rq_str,),
+                            Codes.UNAUTHORIZED,
+                        ),
+                    )
             except Exception as err:
                 # we don't really expect to get here, because any errors should already
                 # have been caught and logged. But if we do, let's log the error and make

@@ -410,10 +415,23 @@ async def _attempt_key_fetches_with_fetcher(self, fetcher, remaining_requests):
                     # key was not valid at this point
                     continue
 
                # we have a valid key for this request. If we run the callback
-                with PreserveLoggingContext():
-                    verify_request.key_ready.callback(
-                        (server_name, key_id, fetch_key_result.verify_key)
-                    )
+                # we have a valid key for this request. If we run the callback
+                # immediately, it may cancel our loggingcontext while we are still in
+                # it, so instead we schedule it for the next time round the reactor.
+                #
+                # (this also ensures that we don't get a stack overflow if we had
+                # a massive queue of lookups waiting for this server).
+                logger.debug(
+                    "Found key %s:%s for %s",
+                    server_name,
+                    key_id,
+                    verify_request.request_name,
+                )
+                self.clock.call_later(
+                    0,
+                    verify_request.key_ready.callback,
+                    (server_name, key_id, fetch_key_result.verify_key),
+                )
                 completed.append(verify_request)
                 break
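
Both hunks replace an in-place `PreserveLoggingContext` callback/errback with `self.clock.call_later(0, ...)`: handing the result to the reactor's next iteration lets the key-lookup code finish unwinding (and keep its logging context) before downstream callbacks run, and a long queue of waiters no longer recurses deeply enough to blow the stack. A minimal Twisted sketch of the same pattern, with made-up names outside Synapse:

    from twisted.internet import defer, reactor

    def fire_on_next_tick(d: defer.Deferred, result) -> None:
        # Deliver the result on the next reactor iteration instead of inline,
        # so the caller's frame is fully unwound before the waiter's callbacks run.
        reactor.callLater(0, d.callback, result)

    waiter = defer.Deferred()
    waiter.addCallback(lambda res: print("key ready:", res))

    fire_on_next_tick(waiter, ("example.org", "ed25519:abc"))
    reactor.callLater(0.1, reactor.stop)
    reactor.run()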

synapse/handlers/federation.py (+10, -6)

@@ -74,6 +74,8 @@
 from synapse.types import (
     JsonDict,
     MutableStateMap,
+    PersistedEventPosition,
+    RoomStreamToken,
     StateMap,
     UserID,
     get_domain_from_id,

@@ -2966,7 +2968,7 @@ async def persist_events_and_notify(
             )
             return result["max_stream_id"]
         else:
-            max_stream_id = await self.storage.persistence.persist_events(
+            max_stream_token = await self.storage.persistence.persist_events(
                 event_and_contexts, backfilled=backfilled
             )
 

@@ -2977,12 +2979,12 @@ async def persist_events_and_notify(
 
         if not backfilled:  # Never notify for backfilled events
             for event, _ in event_and_contexts:
-                await self._notify_persisted_event(event, max_stream_id)
+                await self._notify_persisted_event(event, max_stream_token)
 
-        return max_stream_id
+        return max_stream_token.stream
 
     async def _notify_persisted_event(
-        self, event: EventBase, max_stream_id: int
+        self, event: EventBase, max_stream_token: RoomStreamToken
     ) -> None:
         """Checks to see if notifier/pushers should be notified about the
         event or not.

@@ -3008,9 +3010,11 @@ async def _notify_persisted_event(
         elif event.internal_metadata.is_outlier():
             return
 
-        event_stream_id = event.internal_metadata.stream_ordering
+        event_pos = PersistedEventPosition(
+            self._instance_name, event.internal_metadata.stream_ordering
+        )
         self.notifier.on_new_room_event(
-            event, event_stream_id, max_stream_id, extra_users=extra_users
+            event, event_pos, max_stream_token, extra_users=extra_users
         )
 
     async def _clean_room_for_join(self, room_id: str) -> None:
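
The handler now passes a position object around rather than a bare integer: the diff constructs `PersistedEventPosition` from the persister's instance name plus the event's `stream_ordering`, which is what a multi-writer setup needs in order to say where an event landed. A hedged sketch of the shape implied by this call site (the real definition lives in synapse/types.py alongside the #8388 `EventStreamPosition` work; only the two fields used here are assumed):

    import attr

    @attr.s(frozen=True, slots=True)
    class PersistedEventPositionSketch:
        """Where an event was persisted: which writer, and at what stream ordering."""

        instance_name = attr.ib(type=str)
        stream = attr.ib(type=int)

    pos = PersistedEventPositionSketch("master", 123456)
    print(pos.instance_name, pos.stream)  # callers that used to get a bare int now read .stream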

synapse/handlers/message.py (+3, -3)

@@ -1141,7 +1141,7 @@ def is_inviter_member_event(e):
         if prev_state_ids:
             raise AuthError(403, "Changing the room create event is forbidden")
 
-        event_stream_id, max_stream_id = await self.storage.persistence.persist_event(
+        event_pos, max_stream_token = await self.storage.persistence.persist_event(
             event, context=context
         )
 

@@ -1152,7 +1152,7 @@ def is_inviter_member_event(e):
         def _notify():
             try:
                 self.notifier.on_new_room_event(
-                    event, event_stream_id, max_stream_id, extra_users=extra_users
+                    event, event_pos, max_stream_token, extra_users=extra_users
                 )
             except Exception:
                 logger.exception("Error notifying about new room event")

@@ -1164,7 +1164,7 @@ def _notify():
         # matters as sometimes presence code can take a while.
         run_in_background(self._bump_active_time, requester.user)
 
-        return event_stream_id
+        return event_pos.stream
 
     async def _bump_active_time(self, user: UserID) -> None:
         try:

synapse/handlers/oidc_handler.py (+27, -15)

@@ -114,6 +114,7 @@ def __init__(self, hs: "HomeServer"):
             hs.config.oidc_user_mapping_provider_config
         )  # type: OidcMappingProvider
         self._skip_verification = hs.config.oidc_skip_verification  # type: bool
+        self._allow_existing_users = hs.config.oidc_allow_existing_users  # type: bool
 
         self._http_client = hs.get_proxied_http_client()
         self._auth_handler = hs.get_auth_handler()

@@ -849,7 +850,8 @@ async def _map_userinfo_to_user(
         If we don't find the user that way, we should register the user,
         mapping the localpart and the display name from the UserInfo.
 
-        If a user already exists with the mxid we've mapped, raise an exception.
+        If a user already exists with the mxid we've mapped and allow_existing_users
+        is disabled, raise an exception.
 
         Args:
             userinfo: an object representing the user

@@ -905,21 +907,31 @@ async def _map_userinfo_to_user(
 
         localpart = map_username_to_mxid_localpart(attributes["localpart"])
 
-        user_id = UserID(localpart, self._hostname)
-        if await self._datastore.get_users_by_id_case_insensitive(user_id.to_string()):
-            # This mxid is taken
-            raise MappingException(
-                "mxid '{}' is already taken".format(user_id.to_string())
+        user_id = UserID(localpart, self._hostname).to_string()
+        users = await self._datastore.get_users_by_id_case_insensitive(user_id)
+        if users:
+            if self._allow_existing_users:
+                if len(users) == 1:
+                    registered_user_id = next(iter(users))
+                elif user_id in users:
+                    registered_user_id = user_id
+                else:
+                    raise MappingException(
+                        "Attempted to login as '{}' but it matches more than one user inexactly: {}".format(
+                            user_id, list(users.keys())
+                        )
+                    )
+            else:
+                # This mxid is taken
+                raise MappingException("mxid '{}' is already taken".format(user_id))
+        else:
+            # It's the first time this user is logging in and the mapped mxid was
+            # not taken, register the user
+            registered_user_id = await self._registration_handler.register_user(
+                localpart=localpart,
+                default_display_name=attributes["display_name"],
+                user_agent_ips=(user_agent, ip_address),
             )
-
-        # It's the first time this user is logging in and the mapped mxid was
-        # not taken, register the user
-        registered_user_id = await self._registration_handler.register_user(
-            localpart=localpart,
-            default_display_name=attributes["display_name"],
-            user_agent_ips=(user_agent, ip_address),
-        )
-
         await self._datastore.record_user_external_id(
             self._auth_provider_id, remote_user_id, registered_user_id,
         )