mirror of
https://github.com/element-hq/synapse
synced 2024-07-07 00:53:29 +00:00
tests
This commit is contained in:
parent
ae65155d9f
commit
33775f599e
|
@ -36,10 +36,9 @@ from synapse.util import Clock
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.test_utils import SMALL_PNG
|
from tests.test_utils import SMALL_PNG
|
||||||
from tests.unittest import override_config
|
|
||||||
|
|
||||||
|
|
||||||
class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
|
class FederationMediaDownloadsTest(unittest.FederatingHomeserverTestCase):
|
||||||
|
|
||||||
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
|
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
|
||||||
super().prepare(reactor, clock, hs)
|
super().prepare(reactor, clock, hs)
|
||||||
|
@ -65,9 +64,6 @@ class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase
|
||||||
)
|
)
|
||||||
self.media_repo = hs.get_media_repository()
|
self.media_repo = hs.get_media_repository()
|
||||||
|
|
||||||
@override_config(
|
|
||||||
{"experimental_features": {"msc3916_authenticated_media_enabled": True}}
|
|
||||||
)
|
|
||||||
def test_file_download(self) -> None:
|
def test_file_download(self) -> None:
|
||||||
content = io.BytesIO(b"file_to_stream")
|
content = io.BytesIO(b"file_to_stream")
|
||||||
content_uri = self.get_success(
|
content_uri = self.get_success(
|
||||||
|
@ -82,7 +78,7 @@ class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase
|
||||||
# test with a text file
|
# test with a text file
|
||||||
channel = self.make_signed_federation_request(
|
channel = self.make_signed_federation_request(
|
||||||
"GET",
|
"GET",
|
||||||
f"/_matrix/federation/unstable/org.matrix.msc3916/media/download/{content_uri.media_id}",
|
f"/_matrix/federation/v1/media/download/{content_uri.media_id}",
|
||||||
)
|
)
|
||||||
self.pump()
|
self.pump()
|
||||||
self.assertEqual(200, channel.code)
|
self.assertEqual(200, channel.code)
|
||||||
|
@ -106,7 +102,8 @@ class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase
|
||||||
|
|
||||||
# check that the text file and expected value exist
|
# check that the text file and expected value exist
|
||||||
found_file = any(
|
found_file = any(
|
||||||
"\r\nContent-Type: text/plain\r\n\r\nfile_to_stream" in field
|
"\r\nContent-Type: text/plain\r\nContent-Disposition: inline; filename=test_upload\r\n\r\nfile_to_stream"
|
||||||
|
in field
|
||||||
for field in stripped
|
for field in stripped
|
||||||
)
|
)
|
||||||
self.assertTrue(found_file)
|
self.assertTrue(found_file)
|
||||||
|
@ -124,7 +121,7 @@ class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase
|
||||||
# test with an image file
|
# test with an image file
|
||||||
channel = self.make_signed_federation_request(
|
channel = self.make_signed_federation_request(
|
||||||
"GET",
|
"GET",
|
||||||
f"/_matrix/federation/unstable/org.matrix.msc3916/media/download/{content_uri.media_id}",
|
f"/_matrix/federation/v1/media/download/{content_uri.media_id}",
|
||||||
)
|
)
|
||||||
self.pump()
|
self.pump()
|
||||||
self.assertEqual(200, channel.code)
|
self.assertEqual(200, channel.code)
|
||||||
|
@ -149,25 +146,3 @@ class FederationUnstableMediaDownloadsTest(unittest.FederatingHomeserverTestCase
|
||||||
# check that the png file exists and matches what was uploaded
|
# check that the png file exists and matches what was uploaded
|
||||||
found_file = any(SMALL_PNG in field for field in stripped_bytes)
|
found_file = any(SMALL_PNG in field for field in stripped_bytes)
|
||||||
self.assertTrue(found_file)
|
self.assertTrue(found_file)
|
||||||
|
|
||||||
@override_config(
|
|
||||||
{"experimental_features": {"msc3916_authenticated_media_enabled": False}}
|
|
||||||
)
|
|
||||||
def test_disable_config(self) -> None:
|
|
||||||
content = io.BytesIO(b"file_to_stream")
|
|
||||||
content_uri = self.get_success(
|
|
||||||
self.media_repo.create_content(
|
|
||||||
"text/plain",
|
|
||||||
"test_upload",
|
|
||||||
content,
|
|
||||||
46,
|
|
||||||
UserID.from_string("@user_id:whatever.org"),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
channel = self.make_signed_federation_request(
|
|
||||||
"GET",
|
|
||||||
f"/_matrix/federation/unstable/org.matrix.msc3916/media/download/{content_uri.media_id}",
|
|
||||||
)
|
|
||||||
self.pump()
|
|
||||||
self.assertEqual(404, channel.code)
|
|
||||||
self.assertEqual(channel.json_body.get("errcode"), "M_UNRECOGNIZED")
|
|
||||||
|
|
|
@ -37,18 +37,155 @@ from synapse.http.client import (
|
||||||
BlocklistingAgentWrapper,
|
BlocklistingAgentWrapper,
|
||||||
BlocklistingReactorWrapper,
|
BlocklistingReactorWrapper,
|
||||||
BodyExceededMaxSize,
|
BodyExceededMaxSize,
|
||||||
|
MultipartResponse,
|
||||||
_DiscardBodyWithMaxSizeProtocol,
|
_DiscardBodyWithMaxSizeProtocol,
|
||||||
|
_MultipartParserProtocol,
|
||||||
read_body_with_max_size,
|
read_body_with_max_size,
|
||||||
|
read_multipart_response,
|
||||||
)
|
)
|
||||||
|
|
||||||
from tests.server import FakeTransport, get_clock
|
from tests.server import FakeTransport, get_clock
|
||||||
from tests.unittest import TestCase
|
from tests.unittest import TestCase
|
||||||
|
|
||||||
|
|
||||||
|
class ReadMultipartResponseTests(TestCase):
|
||||||
|
data1 = b"\r\n\r\n--6067d4698f8d40a0a794ea7d7379d53a\r\nContent-Type: application/json\r\n\r\n{}\r\n--6067d4698f8d40a0a794ea7d7379d53a\r\nContent-Type: text/plain\r\nContent-Disposition: inline; filename=test_upload\r\n\r\nfile_"
|
||||||
|
data2 = b"to_stream\r\n--6067d4698f8d40a0a794ea7d7379d53a--\r\n\r\n"
|
||||||
|
|
||||||
|
redirect_data = b"\r\n\r\n--6067d4698f8d40a0a794ea7d7379d53a\r\nContent-Type: application/json\r\n\r\n{}\r\n--6067d4698f8d40a0a794ea7d7379d53a\r\nLocation: https://cdn.example.org/ab/c1/2345.txt\r\n\r\n--6067d4698f8d40a0a794ea7d7379d53a--\r\n\r\n"
|
||||||
|
|
||||||
|
def _build_multipart_response(
|
||||||
|
self, response_length: Union[int, str], max_length: int
|
||||||
|
) -> Tuple[
|
||||||
|
BytesIO,
|
||||||
|
"Deferred[MultipartResponse]",
|
||||||
|
_MultipartParserProtocol,
|
||||||
|
]:
|
||||||
|
"""Start reading the body, returns the response, result and proto"""
|
||||||
|
response = Mock(length=response_length)
|
||||||
|
result = BytesIO()
|
||||||
|
boundary = "6067d4698f8d40a0a794ea7d7379d53a"
|
||||||
|
deferred = read_multipart_response(response, result, boundary, max_length)
|
||||||
|
|
||||||
|
# Fish the protocol out of the response.
|
||||||
|
protocol = response.deliverBody.call_args[0][0]
|
||||||
|
protocol.transport = Mock()
|
||||||
|
|
||||||
|
return result, deferred, protocol
|
||||||
|
|
||||||
|
def _assert_error(
|
||||||
|
self,
|
||||||
|
deferred: "Deferred[MultipartResponse]",
|
||||||
|
protocol: _MultipartParserProtocol,
|
||||||
|
) -> None:
|
||||||
|
"""Ensure that the expected error is received."""
|
||||||
|
assert isinstance(deferred.result, Failure)
|
||||||
|
self.assertIsInstance(deferred.result.value, BodyExceededMaxSize)
|
||||||
|
assert protocol.transport is not None
|
||||||
|
# type-ignore: presumably abortConnection has been replaced with a Mock.
|
||||||
|
protocol.transport.abortConnection.assert_called_once() # type: ignore[attr-defined]
|
||||||
|
|
||||||
|
def _cleanup_error(self, deferred: "Deferred[MultipartResponse]") -> None:
|
||||||
|
"""Ensure that the error in the Deferred is handled gracefully."""
|
||||||
|
called = [False]
|
||||||
|
|
||||||
|
def errback(f: Failure) -> None:
|
||||||
|
called[0] = True
|
||||||
|
|
||||||
|
deferred.addErrback(errback)
|
||||||
|
self.assertTrue(called[0])
|
||||||
|
|
||||||
|
def test_parse_file(self) -> None:
|
||||||
|
"""
|
||||||
|
Check that a multipart response containing a file is properly parsed
|
||||||
|
into the json/file parts, and the json and file are properly captured
|
||||||
|
"""
|
||||||
|
result, deferred, protocol = self._build_multipart_response(249, 250)
|
||||||
|
|
||||||
|
# Start sending data.
|
||||||
|
protocol.dataReceived(self.data1)
|
||||||
|
protocol.dataReceived(self.data2)
|
||||||
|
# Close the connection.
|
||||||
|
protocol.connectionLost(Failure(ResponseDone()))
|
||||||
|
|
||||||
|
multipart_response: MultipartResponse = deferred.result # type: ignore[assignment]
|
||||||
|
|
||||||
|
self.assertEqual(multipart_response.json, b"{}")
|
||||||
|
self.assertEqual(result.getvalue(), b"file_to_stream")
|
||||||
|
self.assertEqual(multipart_response.length, len(b"file_to_stream"))
|
||||||
|
self.assertEqual(multipart_response.content_type, b"text/plain")
|
||||||
|
self.assertEqual(
|
||||||
|
multipart_response.disposition, b"inline; filename=test_upload"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_parse_redirect(self) -> None:
|
||||||
|
"""
|
||||||
|
check that a multipart response containing a redirect is properly parsed and redirect url is
|
||||||
|
returned
|
||||||
|
"""
|
||||||
|
result, deferred, protocol = self._build_multipart_response(249, 250)
|
||||||
|
|
||||||
|
# Start sending data.
|
||||||
|
protocol.dataReceived(self.redirect_data)
|
||||||
|
# Close the connection.
|
||||||
|
protocol.connectionLost(Failure(ResponseDone()))
|
||||||
|
|
||||||
|
multipart_response: MultipartResponse = deferred.result # type: ignore[assignment]
|
||||||
|
|
||||||
|
self.assertEqual(multipart_response.json, b"{}")
|
||||||
|
self.assertEqual(result.getvalue(), b"")
|
||||||
|
self.assertEqual(
|
||||||
|
multipart_response.url, b"https://cdn.example.org/ab/c1/2345.txt"
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_too_large(self) -> None:
|
||||||
|
"""A response which is too large raises an exception."""
|
||||||
|
result, deferred, protocol = self._build_multipart_response(UNKNOWN_LENGTH, 180)
|
||||||
|
|
||||||
|
# Start sending data.
|
||||||
|
protocol.dataReceived(self.data1)
|
||||||
|
|
||||||
|
self.assertEqual(result.getvalue(), b"file_")
|
||||||
|
self._assert_error(deferred, protocol)
|
||||||
|
self._cleanup_error(deferred)
|
||||||
|
|
||||||
|
def test_additional_data(self) -> None:
|
||||||
|
"""A connection can receive data after being closed."""
|
||||||
|
result, deferred, protocol = self._build_multipart_response(UNKNOWN_LENGTH, 180)
|
||||||
|
|
||||||
|
# Start sending data.
|
||||||
|
protocol.dataReceived(self.data1)
|
||||||
|
self._assert_error(deferred, protocol)
|
||||||
|
|
||||||
|
# More data might have come in.
|
||||||
|
protocol.dataReceived(self.data2)
|
||||||
|
|
||||||
|
self.assertEqual(result.getvalue(), b"file_")
|
||||||
|
self._assert_error(deferred, protocol)
|
||||||
|
self._cleanup_error(deferred)
|
||||||
|
|
||||||
|
def test_content_length(self) -> None:
|
||||||
|
"""The body shouldn't be read (at all) if the Content-Length header is too large."""
|
||||||
|
result, deferred, protocol = self._build_multipart_response(250, 1)
|
||||||
|
|
||||||
|
# Deferred shouldn't be called yet.
|
||||||
|
self.assertFalse(deferred.called)
|
||||||
|
|
||||||
|
# Start sending data.
|
||||||
|
protocol.dataReceived(self.data1)
|
||||||
|
self._assert_error(deferred, protocol)
|
||||||
|
self._cleanup_error(deferred)
|
||||||
|
|
||||||
|
# The data is never consumed.
|
||||||
|
self.assertEqual(result.getvalue(), b"")
|
||||||
|
|
||||||
|
|
||||||
class ReadBodyWithMaxSizeTests(TestCase):
|
class ReadBodyWithMaxSizeTests(TestCase):
|
||||||
def _build_response(
|
def _build_response(self, length: Union[int, str] = UNKNOWN_LENGTH) -> Tuple[
|
||||||
self, length: Union[int, str] = UNKNOWN_LENGTH
|
BytesIO,
|
||||||
) -> Tuple[BytesIO, "Deferred[int]", _DiscardBodyWithMaxSizeProtocol]:
|
"Deferred[int]",
|
||||||
|
_DiscardBodyWithMaxSizeProtocol,
|
||||||
|
]:
|
||||||
"""Start reading the body, returns the response, result and proto"""
|
"""Start reading the body, returns the response, result and proto"""
|
||||||
response = Mock(length=length)
|
response = Mock(length=length)
|
||||||
result = BytesIO()
|
result = BytesIO()
|
||||||
|
|
|
@ -129,7 +129,7 @@ class MediaStorageTests(unittest.HomeserverTestCase):
|
||||||
|
|
||||||
|
|
||||||
@attr.s(auto_attribs=True, slots=True, frozen=True)
|
@attr.s(auto_attribs=True, slots=True, frozen=True)
|
||||||
class _TestImage:
|
class TestImage:
|
||||||
"""An image for testing thumbnailing with the expected results
|
"""An image for testing thumbnailing with the expected results
|
||||||
|
|
||||||
Attributes:
|
Attributes:
|
||||||
|
@ -158,7 +158,7 @@ class _TestImage:
|
||||||
is_inline: bool = True
|
is_inline: bool = True
|
||||||
|
|
||||||
|
|
||||||
small_png = _TestImage(
|
small_png = TestImage(
|
||||||
SMALL_PNG,
|
SMALL_PNG,
|
||||||
b"image/png",
|
b"image/png",
|
||||||
b".png",
|
b".png",
|
||||||
|
@ -175,7 +175,7 @@ small_png = _TestImage(
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
|
|
||||||
small_png_with_transparency = _TestImage(
|
small_png_with_transparency = TestImage(
|
||||||
unhexlify(
|
unhexlify(
|
||||||
b"89504e470d0a1a0a0000000d49484452000000010000000101000"
|
b"89504e470d0a1a0a0000000d49484452000000010000000101000"
|
||||||
b"00000376ef9240000000274524e5300010194fdae0000000a4944"
|
b"00000376ef9240000000274524e5300010194fdae0000000a4944"
|
||||||
|
@ -188,7 +188,7 @@ small_png_with_transparency = _TestImage(
|
||||||
# different versions of Pillow.
|
# different versions of Pillow.
|
||||||
)
|
)
|
||||||
|
|
||||||
small_lossless_webp = _TestImage(
|
small_lossless_webp = TestImage(
|
||||||
unhexlify(
|
unhexlify(
|
||||||
b"524946461a000000574542505650384c0d0000002f0000001007" b"1011118888fe0700"
|
b"524946461a000000574542505650384c0d0000002f0000001007" b"1011118888fe0700"
|
||||||
),
|
),
|
||||||
|
@ -196,7 +196,7 @@ small_lossless_webp = _TestImage(
|
||||||
b".webp",
|
b".webp",
|
||||||
)
|
)
|
||||||
|
|
||||||
empty_file = _TestImage(
|
empty_file = TestImage(
|
||||||
b"",
|
b"",
|
||||||
b"image/gif",
|
b"image/gif",
|
||||||
b".gif",
|
b".gif",
|
||||||
|
@ -204,7 +204,7 @@ empty_file = _TestImage(
|
||||||
unable_to_thumbnail=True,
|
unable_to_thumbnail=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
SVG = _TestImage(
|
SVG = TestImage(
|
||||||
b"""<?xml version="1.0"?>
|
b"""<?xml version="1.0"?>
|
||||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||||
|
@ -236,7 +236,7 @@ urls = [
|
||||||
@parameterized_class(("test_image", "url"), itertools.product(test_images, urls))
|
@parameterized_class(("test_image", "url"), itertools.product(test_images, urls))
|
||||||
class MediaRepoTests(unittest.HomeserverTestCase):
|
class MediaRepoTests(unittest.HomeserverTestCase):
|
||||||
servlets = [media.register_servlets]
|
servlets = [media.register_servlets]
|
||||||
test_image: ClassVar[_TestImage]
|
test_image: ClassVar[TestImage]
|
||||||
hijack_auth = True
|
hijack_auth = True
|
||||||
user_id = "@test:user"
|
user_id = "@test:user"
|
||||||
url: ClassVar[str]
|
url: ClassVar[str]
|
||||||
|
|
|
@ -19,31 +19,54 @@
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
import base64
|
import base64
|
||||||
|
import io
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
from typing import Any, Dict, Optional, Sequence, Tuple, Type
|
from typing import Any, BinaryIO, ClassVar, Dict, List, Optional, Sequence, Tuple, Type
|
||||||
|
from unittest.mock import MagicMock, Mock, patch
|
||||||
|
from urllib import parse
|
||||||
from urllib.parse import quote, urlencode
|
from urllib.parse import quote, urlencode
|
||||||
|
|
||||||
|
from parameterized import parameterized_class
|
||||||
|
|
||||||
|
from twisted.internet import defer
|
||||||
from twisted.internet._resolver import HostResolution
|
from twisted.internet._resolver import HostResolution
|
||||||
from twisted.internet.address import IPv4Address, IPv6Address
|
from twisted.internet.address import IPv4Address, IPv6Address
|
||||||
|
from twisted.internet.defer import Deferred
|
||||||
from twisted.internet.error import DNSLookupError
|
from twisted.internet.error import DNSLookupError
|
||||||
from twisted.internet.interfaces import IAddress, IResolutionReceiver
|
from twisted.internet.interfaces import IAddress, IResolutionReceiver
|
||||||
|
from twisted.python.failure import Failure
|
||||||
from twisted.test.proto_helpers import AccumulatingProtocol, MemoryReactor
|
from twisted.test.proto_helpers import AccumulatingProtocol, MemoryReactor
|
||||||
|
from twisted.web.http_headers import Headers
|
||||||
|
from twisted.web.iweb import UNKNOWN_LENGTH, IResponse
|
||||||
from twisted.web.resource import Resource
|
from twisted.web.resource import Resource
|
||||||
|
|
||||||
|
from synapse.api.errors import HttpResponseException
|
||||||
|
from synapse.api.ratelimiting import Ratelimiter
|
||||||
from synapse.config.oembed import OEmbedEndpointConfig
|
from synapse.config.oembed import OEmbedEndpointConfig
|
||||||
|
from synapse.http.client import MultipartResponse
|
||||||
|
from synapse.http.types import QueryParams
|
||||||
|
from synapse.logging.context import make_deferred_yieldable
|
||||||
from synapse.media._base import FileInfo
|
from synapse.media._base import FileInfo
|
||||||
from synapse.media.url_previewer import IMAGE_CACHE_EXPIRY_MS
|
from synapse.media.url_previewer import IMAGE_CACHE_EXPIRY_MS
|
||||||
from synapse.rest import admin
|
from synapse.rest import admin
|
||||||
from synapse.rest.client import login, media
|
from synapse.rest.client import login, media
|
||||||
from synapse.server import HomeServer
|
from synapse.server import HomeServer
|
||||||
from synapse.types import JsonDict
|
from synapse.types import JsonDict, UserID
|
||||||
from synapse.util import Clock
|
from synapse.util import Clock
|
||||||
from synapse.util.stringutils import parse_and_validate_mxc_uri
|
from synapse.util.stringutils import parse_and_validate_mxc_uri
|
||||||
|
|
||||||
from tests import unittest
|
from tests import unittest
|
||||||
from tests.server import FakeTransport, ThreadedMemoryReactorClock
|
from tests.media.test_media_storage import (
|
||||||
|
SVG,
|
||||||
|
TestImage,
|
||||||
|
empty_file,
|
||||||
|
small_lossless_webp,
|
||||||
|
small_png,
|
||||||
|
small_png_with_transparency,
|
||||||
|
)
|
||||||
|
from tests.server import FakeChannel, FakeTransport, ThreadedMemoryReactorClock
|
||||||
from tests.test_utils import SMALL_PNG
|
from tests.test_utils import SMALL_PNG
|
||||||
from tests.unittest import override_config
|
from tests.unittest import override_config
|
||||||
|
|
||||||
|
@ -1607,3 +1630,504 @@ class UnstableMediaConfigTest(unittest.HomeserverTestCase):
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
channel.json_body["m.upload.size"], self.hs.config.media.max_upload_size
|
channel.json_body["m.upload.size"], self.hs.config.media.max_upload_size
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RemoteDownloadLimiterTestCase(unittest.HomeserverTestCase):
|
||||||
|
servlets = [
|
||||||
|
media.register_servlets,
|
||||||
|
login.register_servlets,
|
||||||
|
admin.register_servlets,
|
||||||
|
]
|
||||||
|
|
||||||
|
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
|
||||||
|
config = self.default_config()
|
||||||
|
|
||||||
|
self.storage_path = self.mktemp()
|
||||||
|
self.media_store_path = self.mktemp()
|
||||||
|
os.mkdir(self.storage_path)
|
||||||
|
os.mkdir(self.media_store_path)
|
||||||
|
config["media_store_path"] = self.media_store_path
|
||||||
|
|
||||||
|
provider_config = {
|
||||||
|
"module": "synapse.media.storage_provider.FileStorageProviderBackend",
|
||||||
|
"store_local": True,
|
||||||
|
"store_synchronous": False,
|
||||||
|
"store_remote": True,
|
||||||
|
"config": {"directory": self.storage_path},
|
||||||
|
}
|
||||||
|
|
||||||
|
config["media_storage_providers"] = [provider_config]
|
||||||
|
|
||||||
|
return self.setup_test_homeserver(config=config)
|
||||||
|
|
||||||
|
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
|
||||||
|
self.repo = hs.get_media_repository()
|
||||||
|
self.client = hs.get_federation_http_client()
|
||||||
|
self.store = hs.get_datastores().main
|
||||||
|
self.user = self.register_user("user", "pass")
|
||||||
|
self.tok = self.login("user", "pass")
|
||||||
|
|
||||||
|
# mock actually reading file body
|
||||||
|
def read_multipart_response_30MiB(*args: Any, **kwargs: Any) -> Deferred:
|
||||||
|
d: Deferred = defer.Deferred()
|
||||||
|
d.callback(MultipartResponse(b"{}", 31457280, b"img/png", None))
|
||||||
|
return d
|
||||||
|
|
||||||
|
def read_multipart_response_50MiB(*args: Any, **kwargs: Any) -> Deferred:
|
||||||
|
d: Deferred = defer.Deferred()
|
||||||
|
d.callback(MultipartResponse(b"{}", 31457280, b"img/png", None))
|
||||||
|
return d
|
||||||
|
|
||||||
|
@patch(
|
||||||
|
"synapse.http.matrixfederationclient.read_multipart_response",
|
||||||
|
read_multipart_response_30MiB,
|
||||||
|
)
|
||||||
|
def test_download_ratelimit_default(self) -> None:
|
||||||
|
"""
|
||||||
|
Test remote media download ratelimiting against default configuration - 500MB bucket
|
||||||
|
and 87kb/second drain rate
|
||||||
|
"""
|
||||||
|
|
||||||
|
# mock out actually sending the request, returns a 30MiB response
|
||||||
|
async def _send_request(*args: Any, **kwargs: Any) -> IResponse:
|
||||||
|
resp = MagicMock(spec=IResponse)
|
||||||
|
resp.code = 200
|
||||||
|
resp.length = 31457280
|
||||||
|
resp.headers = Headers(
|
||||||
|
{"Content-Type": ["multipart/mixed; boundary=gc0p4Jq0M2Yt08jU534c0p"]}
|
||||||
|
)
|
||||||
|
resp.phrase = b"OK"
|
||||||
|
return resp
|
||||||
|
|
||||||
|
self.client._send_request = _send_request # type: ignore
|
||||||
|
|
||||||
|
# first request should go through
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abc",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel.code == 200
|
||||||
|
|
||||||
|
# next 15 should go through
|
||||||
|
for i in range(15):
|
||||||
|
channel2 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_matrix/client/v1/media/download/remote.org/abc{i}",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel2.code == 200
|
||||||
|
|
||||||
|
# 17th will hit ratelimit
|
||||||
|
channel3 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcd",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel3.code == 429
|
||||||
|
|
||||||
|
# however, a request from a different IP will go through
|
||||||
|
channel4 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcde",
|
||||||
|
shorthand=False,
|
||||||
|
client_ip="187.233.230.159",
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel4.code == 200
|
||||||
|
|
||||||
|
# at 87Kib/s it should take about 2 minutes for enough to drain from bucket that another
|
||||||
|
# 30MiB download is authorized - The last download was blocked at 503,316,480.
|
||||||
|
# The next download will be authorized when bucket hits 492,830,720
|
||||||
|
# (524,288,000 total capacity - 31,457,280 download size) so 503,316,480 - 492,830,720 ~= 10,485,760
|
||||||
|
# needs to drain before another download will be authorized, that will take ~=
|
||||||
|
# 2 minutes (10,485,760/89,088/60)
|
||||||
|
self.reactor.pump([2.0 * 60.0])
|
||||||
|
|
||||||
|
# enough has drained and next request goes through
|
||||||
|
channel5 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcdef",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel5.code == 200
|
||||||
|
|
||||||
|
@override_config(
|
||||||
|
{
|
||||||
|
"remote_media_download_per_second": "50M",
|
||||||
|
"remote_media_download_burst_count": "50M",
|
||||||
|
}
|
||||||
|
)
|
||||||
|
@patch(
|
||||||
|
"synapse.http.matrixfederationclient.read_multipart_response",
|
||||||
|
read_multipart_response_50MiB,
|
||||||
|
)
|
||||||
|
def test_download_rate_limit_config(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that download rate limit config options are correctly picked up and applied
|
||||||
|
"""
|
||||||
|
|
||||||
|
async def _send_request(*args: Any, **kwargs: Any) -> IResponse:
|
||||||
|
resp = MagicMock(spec=IResponse)
|
||||||
|
resp.code = 200
|
||||||
|
resp.length = 52428800
|
||||||
|
resp.headers = Headers(
|
||||||
|
{"Content-Type": ["multipart/mixed; boundary=gc0p4Jq0M2Yt08jU534c0p"]}
|
||||||
|
)
|
||||||
|
resp.phrase = b"OK"
|
||||||
|
return resp
|
||||||
|
|
||||||
|
self.client._send_request = _send_request # type: ignore
|
||||||
|
|
||||||
|
# first request should go through
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abc",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel.code == 200
|
||||||
|
|
||||||
|
# immediate second request should fail
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcd",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel.code == 429
|
||||||
|
|
||||||
|
# advance half a second
|
||||||
|
self.reactor.pump([0.5])
|
||||||
|
|
||||||
|
# request still fails
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcde",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel.code == 429
|
||||||
|
|
||||||
|
# advance another half second
|
||||||
|
self.reactor.pump([0.5])
|
||||||
|
|
||||||
|
# enough has drained from bucket and request is successful
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcdef",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel.code == 200
|
||||||
|
|
||||||
|
@patch(
|
||||||
|
"synapse.http.matrixfederationclient.read_multipart_response",
|
||||||
|
read_multipart_response_30MiB,
|
||||||
|
)
|
||||||
|
def test_download_ratelimit_max_size_sub(self) -> None:
|
||||||
|
"""
|
||||||
|
Test that if no content-length is provided, the default max size is applied instead
|
||||||
|
"""
|
||||||
|
|
||||||
|
# mock out actually sending the request
|
||||||
|
async def _send_request(*args: Any, **kwargs: Any) -> IResponse:
|
||||||
|
resp = MagicMock(spec=IResponse)
|
||||||
|
resp.code = 200
|
||||||
|
resp.length = UNKNOWN_LENGTH
|
||||||
|
resp.headers = Headers(
|
||||||
|
{"Content-Type": ["multipart/mixed; boundary=gc0p4Jq0M2Yt08jU534c0p"]}
|
||||||
|
)
|
||||||
|
resp.phrase = b"OK"
|
||||||
|
return resp
|
||||||
|
|
||||||
|
self.client._send_request = _send_request # type: ignore
|
||||||
|
|
||||||
|
# ten requests should go through using the max size (500MB/50MB)
|
||||||
|
for i in range(10):
|
||||||
|
channel2 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_matrix/client/v1/media/download/remote.org/abc{i}",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel2.code == 200
|
||||||
|
|
||||||
|
# eleventh will hit ratelimit
|
||||||
|
channel3 = self.make_request(
|
||||||
|
"GET",
|
||||||
|
"/_matrix/client/v1/media/download/remote.org/abcd",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
assert channel3.code == 429
|
||||||
|
|
||||||
|
def test_file_download(self) -> None:
|
||||||
|
content = io.BytesIO(b"file_to_stream")
|
||||||
|
content_uri = self.get_success(
|
||||||
|
self.repo.create_content(
|
||||||
|
"text/plain",
|
||||||
|
"test_upload",
|
||||||
|
content,
|
||||||
|
46,
|
||||||
|
UserID.from_string("@user_id:whatever.org"),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
# test with a text file
|
||||||
|
channel = self.make_request(
|
||||||
|
"GET",
|
||||||
|
f"/_matrix/client/v1/media/download/test/{content_uri.media_id}",
|
||||||
|
shorthand=False,
|
||||||
|
access_token=self.tok,
|
||||||
|
)
|
||||||
|
self.pump()
|
||||||
|
self.assertEqual(200, channel.code)
|
||||||
|
|
||||||
|
|
||||||
|
test_images = [
|
||||||
|
small_png,
|
||||||
|
small_png_with_transparency,
|
||||||
|
small_lossless_webp,
|
||||||
|
empty_file,
|
||||||
|
SVG,
|
||||||
|
]
|
||||||
|
input_values = [(x,) for x in test_images]
|
||||||
|
|
||||||
|
|
||||||
|
@parameterized_class(("test_image",), input_values)
|
||||||
|
class DownloadTestCase(unittest.HomeserverTestCase):
|
||||||
|
test_image: ClassVar[TestImage]
|
||||||
|
servlets = [
|
||||||
|
media.register_servlets,
|
||||||
|
login.register_servlets,
|
||||||
|
admin.register_servlets,
|
||||||
|
]
|
||||||
|
|
||||||
|
def make_homeserver(self, reactor: MemoryReactor, clock: Clock) -> HomeServer:
|
||||||
|
self.fetches: List[
|
||||||
|
Tuple[
|
||||||
|
"Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]]",
|
||||||
|
str,
|
||||||
|
str,
|
||||||
|
Optional[QueryParams],
|
||||||
|
]
|
||||||
|
] = []
|
||||||
|
|
||||||
|
def federation_get_file(
|
||||||
|
destination: str,
|
||||||
|
path: str,
|
||||||
|
output_stream: BinaryIO,
|
||||||
|
download_ratelimiter: Ratelimiter,
|
||||||
|
ip_address: Any,
|
||||||
|
max_size: int,
|
||||||
|
args: Optional[QueryParams] = None,
|
||||||
|
retry_on_dns_fail: bool = True,
|
||||||
|
ignore_backoff: bool = False,
|
||||||
|
follow_redirects: bool = False,
|
||||||
|
) -> "Deferred[Tuple[int, Dict[bytes, List[bytes]], bytes]]":
|
||||||
|
"""A mock for MatrixFederationHttpClient.federation_get_file."""
|
||||||
|
|
||||||
|
def write_to(
|
||||||
|
r: Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]
|
||||||
|
) -> Tuple[int, Dict[bytes, List[bytes]], bytes]:
|
||||||
|
data, response = r
|
||||||
|
output_stream.write(data)
|
||||||
|
return response
|
||||||
|
|
||||||
|
def write_err(f: Failure) -> Failure:
|
||||||
|
f.trap(HttpResponseException)
|
||||||
|
output_stream.write(f.value.response)
|
||||||
|
return f
|
||||||
|
|
||||||
|
d: Deferred[Tuple[bytes, Tuple[int, Dict[bytes, List[bytes]], bytes]]] = (
|
||||||
|
Deferred()
|
||||||
|
)
|
||||||
|
self.fetches.append((d, destination, path, args))
|
||||||
|
# Note that this callback changes the value held by d.
|
||||||
|
d_after_callback = d.addCallbacks(write_to, write_err)
|
||||||
|
return make_deferred_yieldable(d_after_callback)
|
||||||
|
|
||||||
|
# Mock out the homeserver's MatrixFederationHttpClient
|
||||||
|
client = Mock()
|
||||||
|
client.federation_get_file = federation_get_file
|
||||||
|
|
||||||
|
self.storage_path = self.mktemp()
|
||||||
|
self.media_store_path = self.mktemp()
|
||||||
|
os.mkdir(self.storage_path)
|
||||||
|
os.mkdir(self.media_store_path)
|
||||||
|
|
||||||
|
config = self.default_config()
|
||||||
|
config["media_store_path"] = self.media_store_path
|
||||||
|
config["max_image_pixels"] = 2000000
|
||||||
|
|
||||||
|
provider_config = {
|
||||||
|
"module": "synapse.media.storage_provider.FileStorageProviderBackend",
|
||||||
|
"store_local": True,
|
||||||
|
"store_synchronous": False,
|
||||||
|
"store_remote": True,
|
||||||
|
"config": {"directory": self.storage_path},
|
||||||
|
}
|
||||||
|
config["media_storage_providers"] = [provider_config]
|
||||||
|
config["experimental_features"] = {"msc3916_authenticated_media_enabled": True}
|
||||||
|
|
||||||
|
hs = self.setup_test_homeserver(config=config, federation_http_client=client)
|
||||||
|
|
||||||
|
return hs
|
||||||
|
|
||||||
|
def prepare(self, reactor: MemoryReactor, clock: Clock, hs: HomeServer) -> None:
    """Set up handles, a fake remote media reference, and a local user.

    The remote/media_id pair is fabricated; the mocked federation client
    installed on the homeserver will answer requests for it.
    """
    self.store = hs.get_datastores().main
    self.media_repo = hs.get_media_repository()

    # The remote server and media id the tests will ask this homeserver to
    # fetch over federation.
    self.remote = "example.com"
    self.media_id = "12345"

    # Local user whose access token authenticates the client media requests.
    self.user = self.register_user("user", "pass")
    self.tok = self.login("user", "pass")
|
||||||
|
|
||||||
|
def _req(
    self, content_disposition: Optional[bytes], include_content_type: bool = True
) -> FakeChannel:
    """Make an authenticated client media download and mock the remote reply.

    Issues GET /_matrix/client/v1/media/download/<remote>/<media_id>, checks
    that exactly one federation fetch was triggered at the expected v1
    federation media path, then resolves that fetch's deferred with
    self.test_image so the client request can complete.

    Args:
        content_disposition: value for the mocked remote's
            Content-Disposition header, or None to omit the header entirely.
        include_content_type: whether the mocked remote response carries a
            Content-Type header.

    Returns:
        The completed FakeChannel, already asserted to have a 200 code.
    """
    channel = self.make_request(
        "GET",
        f"/_matrix/client/v1/media/download/{self.remote}/{self.media_id}",
        shorthand=False,
        await_result=False,
        access_token=self.tok,
    )
    self.pump()

    # We've made one fetch, to example.com, using the federation media URL
    self.assertEqual(len(self.fetches), 1)
    self.assertEqual(self.fetches[0][1], "example.com")
    self.assertEqual(
        self.fetches[0][2], "/_matrix/federation/v1/media/download/" + self.media_id
    )
    self.assertEqual(
        self.fetches[0][3],
        {"timeout_ms": "20000"},
    )

    # Build the fake remote server's response headers; Content-Type and
    # Content-Disposition are optional per the test parameters.
    headers = {
        b"Content-Length": [b"%d" % (len(self.test_image.data))],
    }

    if include_content_type:
        headers[b"Content-Type"] = [self.test_image.content_type]

    if content_disposition:
        headers[b"Content-Disposition"] = [content_disposition]

    # Resolve the pending fetch deferred with the image body and headers,
    # unblocking the in-flight client request.
    self.fetches[0][0].callback(
        (self.test_image.data, (len(self.test_image.data), headers, b"{}"))
    )

    self.pump()
    self.assertEqual(channel.code, 200)

    return channel
|
||||||
|
|
||||||
|
def test_handle_missing_content_type(self) -> None:
    """A remote reply with no Content-Type is served as application/octet-stream."""
    disposition = b"attachment; filename=out" + self.test_image.extension
    channel = self._req(disposition, include_content_type=False)

    self.assertEqual(channel.code, 200)
    self.assertEqual(
        channel.headers.getRawHeaders(b"Content-Type"),
        [b"application/octet-stream"],
    )
|
||||||
|
|
||||||
|
def test_disposition_filename_ascii(self) -> None:
    """
    If the filename is filename=<ascii> then Synapse will decode it as an
    ASCII string, and use filename= in the response.
    """
    channel = self._req(b"attachment; filename=out" + self.test_image.extension)

    headers = channel.headers
    if self.test_image.is_inline:
        disposition_type = b"inline"
    else:
        disposition_type = b"attachment"

    self.assertEqual(
        headers.getRawHeaders(b"Content-Type"), [self.test_image.content_type]
    )
    self.assertEqual(
        headers.getRawHeaders(b"Content-Disposition"),
        [disposition_type + b"; filename=out" + self.test_image.extension],
    )
|
||||||
|
|
||||||
|
def test_disposition_filenamestar_utf8escaped(self) -> None:
    """
    If the filename is filename=*utf8''<utf8 escaped> then Synapse will
    correctly decode it as the UTF-8 string, and use filename* in the
    response.
    """
    # Percent-encode a snowman (U+2603) as the remote-supplied filename.
    filename = parse.quote("\u2603".encode()).encode("ascii")
    channel = self._req(
        b"attachment; filename*=utf-8''" + filename + self.test_image.extension
    )

    headers = channel.headers
    disposition_type = b"inline" if self.test_image.is_inline else b"attachment"
    expected_disposition = (
        disposition_type
        + b"; filename*=utf-8''"
        + filename
        + self.test_image.extension
    )

    self.assertEqual(
        headers.getRawHeaders(b"Content-Type"), [self.test_image.content_type]
    )
    self.assertEqual(
        headers.getRawHeaders(b"Content-Disposition"), [expected_disposition]
    )
|
||||||
|
|
||||||
|
def test_disposition_none(self) -> None:
    """
    If there is no filename, Content-Disposition should only
    be a disposition type.
    """
    channel = self._req(None)

    headers = channel.headers
    self.assertEqual(
        headers.getRawHeaders(b"Content-Type"), [self.test_image.content_type]
    )
    expected = b"inline" if self.test_image.is_inline else b"attachment"
    self.assertEqual(headers.getRawHeaders(b"Content-Disposition"), [expected])
|
||||||
|
|
||||||
|
def test_x_robots_tag_header(self) -> None:
    """
    Tests that the `X-Robots-Tag` header is present, which informs web crawlers
    to not index, archive, or follow links in media.
    """
    channel = self._req(b"attachment; filename=out" + self.test_image.extension)

    self.assertEqual(
        channel.headers.getRawHeaders(b"X-Robots-Tag"),
        [b"noindex, nofollow, noarchive, noimageindex"],
    )
|
||||||
|
|
||||||
|
def test_cross_origin_resource_policy_header(self) -> None:
    """
    Test that the Cross-Origin-Resource-Policy header is set to "cross-origin"
    allowing web clients to embed media from the downloads API.
    """
    channel = self._req(b"attachment; filename=out" + self.test_image.extension)

    self.assertEqual(
        channel.headers.getRawHeaders(b"Cross-Origin-Resource-Policy"),
        [b"cross-origin"],
    )
|
||||||
|
|
||||||
|
# TODO: add a test asserting that authentication is now required for media downloads
# TODO: add a basic end-to-end download test
|
||||||
|
|
Loading…
Reference in a new issue