Start implementing auth chains

Erik Johnston 2014-11-06 18:42:18 +00:00
parent 8421cabb9d
commit bf6b72eb55
8 changed files with 115 additions and 6 deletions

@@ -21,8 +21,7 @@ from synapse.api.constants import Membership, JoinRules
from synapse.api.errors import AuthError, StoreError, Codes, SynapseError
from synapse.api.events.room import (
    RoomMemberEvent, RoomPowerLevelsEvent, RoomRedactionEvent,
    RoomJoinRulesEvent, InviteJoinEvent,
    RoomCreateEvent,
    RoomJoinRulesEvent, RoomCreateEvent,
)
from synapse.util.logutils import log_function

@@ -61,7 +61,7 @@ class SynapseEvent(JsonEncodedObject):
        "replaces_state",
        "redacted_because",
        "origin_server_ts",
        "auth_chains",
        "auth_events",
    ]

    internal_keys = [
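
Because "auth_events" is now a valid key, the auth references survive SynapseEvent.get_dict() and travel with the event, in the same (event_id, hashes) pairing already used for prev_events. A rough sketch of the serialised shape, with made-up event IDs and placeholder hashes purely for illustration:

illustrative_event = {
    "type": "m.room.message",
    "room_id": "!abc123:example.com",
    "prev_events": [
        ["$prior_event:example.com", {"sha256": "<base64 hash>"}],
    ],
    "auth_events": [
        ["$power_levels_event:example.com", {"sha256": "<base64 hash>"}],
        ["$sender_join_event:example.com", {"sha256": "<base64 hash>"}],
    ],
}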

@@ -14,11 +14,15 @@
# limitations under the License.
from twisted.internet import defer
from synapse.api.errors import LimitExceededError
from synapse.util.async import run_on_reactor
from synapse.crypto.event_signing import add_hashes_and_signatures
from synapse.api.events.room import (
    RoomCreateEvent, RoomMemberEvent, RoomPowerLevelsEvent, RoomJoinRulesEvent,
)
from synapse.api.constants import Membership, JoinRules
from syutil.base64util import encode_base64
import logging

@@ -55,6 +59,53 @@ class BaseHandler(object):
                retry_after_ms=int(1000*(time_allowed - time_now)),
            )

    @defer.inlineCallbacks
    def _add_auth(self, event):
        if event.type == RoomCreateEvent.TYPE:
            event.auth_events = []
            return

        auth_events = []

        key = (RoomPowerLevelsEvent.TYPE, "", )
        power_level_event = event.old_state_events.get(key)

        if power_level_event:
            auth_events.append(power_level_event.event_id)

        key = (RoomJoinRulesEvent.TYPE, "", )
        join_rule_event = event.old_state_events.get(key)

        key = (RoomMemberEvent.TYPE, event.user_id, )
        member_event = event.old_state_events.get(key)

        if join_rule_event:
            join_rule = join_rule_event.content.get("join_rule")
            is_public = join_rule == JoinRules.PUBLIC if join_rule else False

        if event.type == RoomMemberEvent.TYPE:
            if event.content["membership"] == Membership.JOIN:
                if is_public:
                    auth_events.append(join_rule_event.event_id)
                elif member_event:
                    auth_events.append(member_event.event_id)

        if member_event:
            if member_event.content["membership"] == Membership.JOIN:
                auth_events.append(member_event.event_id)

        hashes = yield self.store.get_event_reference_hashes(
            auth_events
        )
        hashes = [
            {
                k: encode_base64(v) for k, v in h.items()
                if k == "sha256"
            }
            for h in hashes
        ]
        event.auth_events = zip(auth_events, hashes)

    @defer.inlineCallbacks
    def _on_new_room_event(self, event, snapshot, extra_destinations=[],
                           extra_users=[], suppress_auth=False):
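
In short, _add_auth picks out the state events that justify the new event (the current power levels, plus the join rules or the sender's membership as appropriate) and pairs each chosen event ID with its sha256 reference hash fetched via store.get_event_reference_hashes. A minimal sketch of the attribute it leaves behind, with invented event IDs and placeholder hashes; under the Python 2 runtime Synapse used at the time, zip() returns a plain list:

event.auth_events = [
    ("$power_levels_event:example.com", {"sha256": "<base64 reference hash>"}),
    ("$sender_join_event:example.com", {"sha256": "<base64 reference hash>"}),
]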

@@ -64,6 +115,8 @@ class BaseHandler(object):
        yield self.state_handler.annotate_state_groups(event)

        yield self._add_auth(event)

        logger.debug("Signing event...")
        add_hashes_and_signatures(

@@ -76,6 +129,8 @@ class BaseHandler(object):
            logger.debug("Authing...")
            self.auth.check(event, raises=True)
            logger.debug("Authed")
        else:
            logger.debug("Suppressed auth.")

        yield self.store.persist_event(event)

@@ -19,7 +19,6 @@ from synapse.api.events.room import (
    RoomMemberEvent, RoomTopicEvent, FeedbackEvent,
    RoomNameEvent,
    RoomJoinRulesEvent,
    RoomPowerLevelsEvent,
    RoomRedactionEvent,
)

@@ -302,6 +301,17 @@ class DataStore(RoomMemberStore, RoomStore,
                    txn, event.event_id, prev_event_id, alg, hash_bytes
                )

        for auth_id, _ in event.auth_events:
            self._simple_insert_txn(
                txn,
                table="event_auth",
                values={
                    "event_id": event.event_id,
                    "room_id": event.room_id,
                    "auth_id": auth_id,
                },
            )

        (ref_alg, ref_hash_bytes) = compute_event_reference_hash(event)
        self._store_event_reference_hash_txn(
            txn, event.event_id, ref_alg, ref_hash_bytes

@@ -474,6 +474,8 @@ class SQLBaseStore(object):
                if is_state == 0
            ]

            ev.auth_events = self._get_auth_events(txn, ev.event_id)

            if hasattr(ev, "state_key"):
                ev.prev_state = [
                    (e_id, h)

@@ -139,6 +139,27 @@ class EventFederationStore(SQLBaseStore):
        return results

    def _get_auth_events(self, txn, event_id):
        auth_ids = self._simple_select_onecol_txn(
            txn,
            table="event_auth",
            keyvalues={
                "event_id": event_id,
            },
            retcol="auth_id",
        )

        results = []
        for auth_id in auth_ids:
            hashes = self._get_event_reference_hashes_txn(txn, auth_id)
            prev_hashes = {
                k: encode_base64(v) for k, v in hashes.items()
                if k == "sha256"
            }
            results.append((auth_id, prev_hashes))

        return results

    def get_min_depth(self, room_id):
        return self.runInteraction(
            "get_min_depth",

@@ -63,3 +63,13 @@ CREATE INDEX IF NOT EXISTS st_extrem_keys ON state_forward_extremities(
);
CREATE INDEX IF NOT EXISTS st_extrem_id ON state_forward_extremities(event_id);

CREATE TABLE IF NOT EXISTS event_auth(
    event_id TEXT NOT NULL,
    auth_id TEXT NOT NULL,
    room_id TEXT NOT NULL,
    CONSTRAINT uniqueness UNIQUE (event_id, auth_id, room_id)
);

CREATE INDEX IF NOT EXISTS evauth_edges_id ON event_auth(event_id);
CREATE INDEX IF NOT EXISTS evauth_edges_auth_id ON event_auth(auth_id);
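
Each event_auth row is one edge from an event to one of the events that authorise it, so an event's full auth chain is the transitive closure of these edges; evauth_edges_id serves the forward lookups and evauth_edges_auth_id the reverse direction. A sketch of how the chain could be walked over this schema — not part of this commit, and assuming an ordinary DB-API cursor:

def get_auth_chain_ids(cursor, event_id):
    # Breadth-first walk over event_auth edges, collecting every ancestor.
    seen = set()
    frontier = {event_id}
    while frontier:
        next_frontier = set()
        for ev_id in frontier:
            cursor.execute(
                "SELECT auth_id FROM event_auth WHERE event_id = ?",
                (ev_id,),
            )
            for (auth_id,) in cursor.fetchall():
                if auth_id not in seen:
                    seen.add(auth_id)
                    next_frontier.add(auth_id)
        frontier = next_frontier
    return seen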

@@ -55,6 +55,18 @@ class SignatureStore(SQLBaseStore):
            or_ignore=True,
        )

    def get_event_reference_hashes(self, event_ids):
        def f(txn):
            return [
                self._get_event_reference_hashes_txn(txn, ev)
                for ev in event_ids
            ]
        return self.runInteraction(
            "get_event_reference_hashes",
            f
        )

    def _get_event_reference_hashes_txn(self, txn, event_id):
        """Get all the hashes for a given PDU.
        Args: