Merge pull request #6982 from vector-im/resilience-rc

Resilience rc
Commit bacad0df3c by aringenbach, 2022-11-07 15:57:09 +01:00, committed by GitHub (GPG key ID: 4AEE18F83AFDEB23).
73 changed files with 758 additions and 326 deletions

@@ -0,0 +1,23 @@
{
  "images" : [
    {
      "filename" : "action_formatting_disabled.png",
      "idiom" : "universal",
      "scale" : "1x"
    },
    {
      "filename" : "action_formatting_disabled@2x.png",
      "idiom" : "universal",
      "scale" : "2x"
    },
    {
      "filename" : "action_formatting_disabled@3x.png",
      "idiom" : "universal",
      "scale" : "3x"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

(Binary files not shown: action_formatting_disabled.png added at 1x (513 B), 2x (894 B) and 3x (1.1 KiB).)

@@ -0,0 +1,23 @@
{
  "images" : [
    {
      "filename" : "action_formatting_enabled.png",
      "idiom" : "universal",
      "scale" : "1x"
    },
    {
      "filename" : "action_formatting_enabled@2x.png",
      "idiom" : "universal",
      "scale" : "2x"
    },
    {
      "filename" : "action_formatting_enabled@3x.png",
      "idiom" : "universal",
      "scale" : "3x"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

(Binary files not shown: action_formatting_enabled.png added at 1x (530 B), 2x (895 B) and 3x (1.2 KiB).)

@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "voice_broadcast_tile_live.svg",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

@@ -0,0 +1,7 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M13.4589 2.7911C13.2328 2.50046 12.814 2.44811 12.5234 2.67415C12.233 2.89995 12.1805 3.31813 12.4057 3.60872L12.4062 3.60937L12.4068 3.61023L12.4159 3.62234C12.4248 3.6342 12.439 3.65358 12.4578 3.68014C12.4956 3.73328 12.5517 3.81499 12.6202 3.92259C12.7574 4.13815 12.9427 4.45557 13.1285 4.85374C13.502 5.6541 13.866 6.756 13.866 8.00039C13.866 9.24478 13.502 10.3467 13.1285 11.147C12.9427 11.5452 12.7574 11.8626 12.6202 12.0782C12.5517 12.1858 12.4956 12.2675 12.4578 12.3206C12.439 12.3472 12.4248 12.3666 12.4159 12.3784L12.4068 12.3905L12.4062 12.3914L12.4056 12.3922C12.1805 12.6828 12.2331 13.1009 12.5234 13.3266C12.814 13.5527 13.2328 13.5003 13.4589 13.2097L12.962 12.8232C13.4589 13.2097 13.4589 13.2097 13.4589 13.2097L13.4602 13.208L13.4621 13.2055L13.4677 13.1983L13.4853 13.1748C13.4999 13.1552 13.5201 13.1277 13.5449 13.0926C13.5947 13.0226 13.6636 12.9221 13.7451 12.794C13.9079 12.5381 14.1226 12.1699 14.3368 11.7109C14.7633 10.797 15.1993 9.49886 15.1993 8.00039C15.1993 6.50192 14.7633 5.20382 14.3368 4.28989C14.1226 3.83092 13.9079 3.46263 13.7451 3.20676C13.6636 3.07865 13.5947 2.97821 13.5449 2.90814C13.5201 2.87309 13.4999 2.84559 13.4853 2.82598L13.4677 2.80251L13.4621 2.79528L13.4602 2.79281L13.4595 2.79185C13.4595 2.79185 13.4589 2.7911 12.9326 3.20039L13.4589 2.7911Z" fill="#737D8C"/>
<path d="M11.7261 5.19124C11.5001 4.90061 11.0812 4.84825 10.7906 5.0743C10.5007 5.29976 10.4479 5.71701 10.6719 6.00755L10.6742 6.0106C10.6772 6.0146 10.6828 6.02225 10.6907 6.03341C10.7066 6.05575 10.7315 6.09192 10.7625 6.1406C10.8246 6.2383 10.91 6.38429 10.9958 6.56817C11.1693 6.93996 11.3332 7.44186 11.3332 8.00054C11.3332 8.55921 11.1693 9.06111 10.9958 9.4329C10.91 9.61678 10.8246 9.76277 10.7625 9.86047C10.7315 9.90915 10.7066 9.94532 10.6907 9.96766C10.6828 9.97881 10.6772 9.98647 10.6742 9.99047L10.6719 9.99353C10.4479 10.2841 10.5007 10.7013 10.7906 10.9268C11.0812 11.1528 11.5001 11.1005 11.7261 10.8098L11.1999 10.4005C11.7261 10.8098 11.7261 10.8098 11.7261 10.8098L11.7273 10.8083L11.7288 10.8064L11.7326 10.8014L11.7436 10.7868C11.7523 10.7751 11.7639 10.7593 11.7778 10.7397C11.8057 10.7004 11.8433 10.6455 11.8873 10.5763C11.9752 10.4383 12.0898 10.2414 12.204 9.99674C12.4305 9.51139 12.6666 8.81329 12.6666 8.00054C12.6666 7.18778 12.4305 6.48968 12.204 6.00433C12.0898 5.75964 11.9752 5.56278 11.8873 5.42476C11.8433 5.35558 11.8057 5.30068 11.7778 5.26141C11.7639 5.24176 11.7523 5.22598 11.7436 5.21424L11.7326 5.19967L11.7288 5.19468L11.7273 5.19276L11.7267 5.19195C11.7267 5.19195 11.7261 5.19124 11.1999 5.60054L11.7261 5.19124Z" fill="#737D8C"/>
<path d="M2.40733 13.2096C2.63337 13.5003 3.05223 13.5526 3.34286 13.3266C3.63317 13.1008 3.68572 12.6826 3.46054 12.392L3.46004 12.3914L3.45939 12.3905L3.45029 12.3784C3.44145 12.3665 3.42722 12.3472 3.40836 12.3206C3.37062 12.2674 3.31448 12.1857 3.246 12.0781C3.10883 11.8626 2.9235 11.5452 2.73768 11.147C2.36418 10.3466 2.00023 9.24473 2.00023 8.00034C2.00023 6.75596 2.36418 5.65406 2.73768 4.8537C2.9235 4.45553 3.10883 4.13811 3.246 3.92255C3.31448 3.81495 3.37062 3.73324 3.40836 3.6801C3.42722 3.65354 3.44145 3.63416 3.45029 3.6223L3.45939 3.61019L3.46004 3.60933L3.46064 3.60855C3.68571 3.31797 3.63313 2.89988 3.34286 2.67411C3.05223 2.44806 2.63337 2.50042 2.40733 2.79105L2.90417 3.17748C2.40732 2.79106 2.40733 2.79105 2.40733 2.79105L2.406 2.79276L2.40409 2.79524L2.39856 2.80247L2.3809 2.82594C2.3663 2.84555 2.34615 2.87305 2.32126 2.9081C2.2715 2.97817 2.20265 3.0786 2.12112 3.20671C1.95829 3.46259 1.74363 3.83088 1.52944 4.28985C1.10294 5.20378 0.666896 6.50188 0.666896 8.00034C0.666896 9.49881 1.10294 10.7969 1.52944 11.7108C1.74363 12.1698 1.95829 12.5381 2.12112 12.794C2.20265 12.9221 2.2715 13.0225 2.32126 13.0926C2.34615 13.1276 2.3663 13.1551 2.3809 13.1747L2.39856 13.1982L2.40409 13.2054L2.406 13.2079L2.40674 13.2089C2.40674 13.2089 2.40733 13.2096 2.93356 12.8003L2.40733 13.2096Z" fill="#737D8C"/>
<path d="M4.14008 10.8095C4.36612 11.1001 4.78497 11.1525 5.0756 10.9264C5.36548 10.701 5.41832 10.2837 5.19431 9.99318L5.19202 9.99013C5.18904 9.98614 5.18341 9.97848 5.17549 9.96732C5.15962 9.94498 5.13473 9.90881 5.10375 9.86014C5.04158 9.76244 4.95625 9.61644 4.87043 9.43256C4.69693 9.06077 4.53298 8.55887 4.53298 8.0002C4.53298 7.44152 4.69693 6.93963 4.87043 6.56784C4.95625 6.38395 5.04158 6.23796 5.10375 6.14026C5.13473 6.09158 5.15962 6.05542 5.17549 6.03307C5.18341 6.02192 5.18904 6.01426 5.19202 6.01026L5.19432 6.0072C5.41832 5.71667 5.36547 5.29942 5.0756 5.07396C4.78497 4.84792 4.36612 4.90027 4.14008 5.19091L4.66631 5.6002C4.14008 5.19091 4.14008 5.19091 4.14008 5.19091L4.13889 5.19243L4.13742 5.19434L4.1336 5.19933L4.12263 5.21391C4.11389 5.22565 4.10234 5.24143 4.08838 5.26107C4.0605 5.30034 4.02289 5.35524 3.97887 5.42442C3.89104 5.56244 3.77638 5.7593 3.66219 6.00399C3.43569 6.48934 3.19964 7.18745 3.19964 8.0002C3.19964 8.81295 3.43569 9.51105 3.66219 9.99641C3.77638 10.2411 3.89104 10.438 3.97887 10.576C4.02289 10.6452 4.0605 10.7001 4.08838 10.7393C4.10234 10.759 4.11389 10.7747 4.12263 10.7865L4.1336 10.8011L4.13742 10.8061L4.13889 10.808L4.13952 10.8088C4.13952 10.8088 4.14008 10.8095 4.66631 10.4002L4.14008 10.8095Z" fill="#737D8C"/>
<circle cx="8.00033" cy="8.00008" r="1.33333" fill="#737D8C"/>
</svg>


@@ -0,0 +1,12 @@
{
  "images" : [
    {
      "filename" : "voice_broadcast_tile_mic.svg",
      "idiom" : "universal"
    }
  ],
  "info" : {
    "author" : "xcode",
    "version" : 1
  }
}

@@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.39966 4.1C5.39966 2.66406 6.56372 1.5 7.99966 1.5C9.4356 1.5 10.5997 2.66406 10.5997 4.1V7.98837C10.5997 9.42431 9.4356 10.5884 7.99966 10.5884C6.56372 10.5884 5.39966 9.4243 5.39966 7.98836V4.1Z" fill="#737D8C"/>
<path d="M3.44954 7.15837C3.90978 7.15837 4.28288 7.53146 4.28288 7.9917C4.28288 10.0368 5.94052 11.6972 7.98961 11.7025C7.99296 11.7025 7.99632 11.7025 7.99967 11.7025C8.00302 11.7025 8.00636 11.7025 8.00969 11.7025C10.0587 11.6971 11.7162 10.0368 11.7162 7.9917C11.7162 7.53146 12.0893 7.15837 12.5495 7.15837C13.0098 7.15837 13.3829 7.53146 13.3829 7.9917C13.3829 10.6793 11.4097 12.905 8.83301 13.3051V13.8341C8.83301 14.2944 8.45991 14.6675 7.99967 14.6675C7.53944 14.6675 7.16634 14.2944 7.16634 13.8341V13.3052C4.58956 12.9052 2.61621 10.6794 2.61621 7.9917C2.61621 7.53146 2.98931 7.15837 3.44954 7.15837Z" fill="#737D8C"/>
</svg>


@@ -19,4 +19,3 @@
 // MARK: Onboarding Personalization WIP
 "image_picker_action_files" = "Choose from files";
-"voice_broadcast_in_timeline_title" = "Voice broadcast detected (under active development)";

@@ -797,7 +797,7 @@ Tap the + to start adding people.";
 "settings_labs_enable_new_session_manager" = "New session manager";
 "settings_labs_enable_new_client_info_feature" = "Record the client name, version, and url to recognise sessions more easily in session manager";
 "settings_labs_enable_new_app_layout" = "New Application Layout";
-"settings_labs_enable_wysiwyg_composer" = "Try out the rich text editor (plain text mode coming soon)";
+"settings_labs_enable_wysiwyg_composer" = "Try out the rich text editor";
 "settings_labs_enable_voice_broadcast" = "Voice broadcast (under active development)";
 "settings_version" = "Version %@";
@@ -2195,6 +2195,8 @@ Tap the + to start adding people.";
 "voice_broadcast_blocked_by_someone_else_message" = "Someone else is already recording a voice broadcast. Wait for their voice broadcast to end to start a new one.";
 "voice_broadcast_already_in_progress_message" = "You are already recording a voice broadcast. Please end your current voice broadcast to start a new one.";
 "voice_broadcast_playback_loading_error" = "Unable to play this voice broadcast.";
+"voice_broadcast_live" = "Live";
+"voice_broadcast_tile" = "Voice broadcast";
 // Mark: - Version check

@@ -197,6 +197,8 @@ internal class Asset: NSObject {
 internal static let peopleFloatingAction = ImageAsset(name: "people_floating_action")
 internal static let actionCamera = ImageAsset(name: "action_camera")
 internal static let actionFile = ImageAsset(name: "action_file")
+internal static let actionFormattingDisabled = ImageAsset(name: "action_formatting_disabled")
+internal static let actionFormattingEnabled = ImageAsset(name: "action_formatting_enabled")
 internal static let actionLive = ImageAsset(name: "action_live")
 internal static let actionLocation = ImageAsset(name: "action_location")
 internal static let actionMediaLibrary = ImageAsset(name: "action_media_library")
@@ -343,6 +345,8 @@ internal class Asset: NSObject {
 internal static let voiceBroadcastRecord = ImageAsset(name: "voice_broadcast_record")
 internal static let voiceBroadcastRecordPause = ImageAsset(name: "voice_broadcast_record_pause")
 internal static let voiceBroadcastStop = ImageAsset(name: "voice_broadcast_stop")
+internal static let voiceBroadcastTileLive = ImageAsset(name: "voice_broadcast_tile_live")
+internal static let voiceBroadcastTileMic = ImageAsset(name: "voice_broadcast_tile_mic")
 internal static let launchScreenLogo = ImageAsset(name: "launch_screen_logo")
 }
 @objcMembers

@@ -7547,7 +7547,7 @@ public class VectorL10n: NSObject {
 public static var settingsLabsEnableVoiceBroadcast: String {
 return VectorL10n.tr("Vector", "settings_labs_enable_voice_broadcast")
 }
-/// Try out the rich text editor (plain text mode coming soon)
+/// Try out the rich text editor
 public static var settingsLabsEnableWysiwygComposer: String {
 return VectorL10n.tr("Vector", "settings_labs_enable_wysiwyg_composer")
 }
@@ -9087,6 +9087,10 @@ public class VectorL10n: NSObject {
 public static var voiceBroadcastBlockedBySomeoneElseMessage: String {
 return VectorL10n.tr("Vector", "voice_broadcast_blocked_by_someone_else_message")
 }
+/// Live
+public static var voiceBroadcastLive: String {
+return VectorL10n.tr("Vector", "voice_broadcast_live")
+}
 /// You don't have the required permissions to start a voice broadcast in this room. Contact a room administrator to upgrade your permissions.
 public static var voiceBroadcastPermissionDeniedMessage: String {
 return VectorL10n.tr("Vector", "voice_broadcast_permission_denied_message")
@@ -9095,6 +9099,10 @@ public class VectorL10n: NSObject {
 public static var voiceBroadcastPlaybackLoadingError: String {
 return VectorL10n.tr("Vector", "voice_broadcast_playback_loading_error")
 }
+/// Voice broadcast
+public static var voiceBroadcastTile: String {
+return VectorL10n.tr("Vector", "voice_broadcast_tile")
+}
 /// Can't start a new voice broadcast
 public static var voiceBroadcastUnauthorizedTitle: String {
 return VectorL10n.tr("Vector", "voice_broadcast_unauthorized_title")

@@ -14,10 +14,6 @@ public extension VectorL10n {
 static var imagePickerActionFiles: String {
 return VectorL10n.tr("Untranslated", "image_picker_action_files")
 }
-/// Voice broadcast detected (under active development)
-static var voiceBroadcastInTimelineTitle: String {
-return VectorL10n.tr("Untranslated", "voice_broadcast_in_timeline_title")
-}
 }
 // swiftlint:enable function_parameter_count identifier_name line_length type_body_length

@@ -177,6 +177,9 @@ final class RiotSettings: NSObject {
 @UserDefault(key: "enableWysiwygComposer", defaultValue: false, storage: defaults)
 var enableWysiwygComposer
+@UserDefault(key: "enableWysiwygTextFormatting", defaultValue: true, storage: defaults)
+var enableWysiwygTextFormatting
 /// Flag indicating if the IP addresses should be shown in the new device manager
 @UserDefault(key: UserDefaultsKeys.showIPAddressesInSessionsManager, defaultValue: false, storage: defaults)
 var showIPAddressesInSessionsManager
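The new enableWysiwygTextFormatting flag defaults to true and is persisted through the project's @UserDefault property wrapper, alongside the existing enableWysiwygComposer flag. A minimal sketch of that kind of UserDefaults-backed wrapper, assuming a simple key/default/storage design (the project's actual wrapper may differ):

import Foundation

// Minimal sketch of a UserDefaults-backed property wrapper; names are illustrative.
@propertyWrapper
struct UserDefaultSketch<T> {
    let key: String
    let defaultValue: T
    let storage: UserDefaults

    var wrappedValue: T {
        get { storage.object(forKey: key) as? T ?? defaultValue }
        set { storage.set(newValue, forKey: key) }
    }
}

struct ComposerSettingsSketch {
    // Mirrors the flag added above: text formatting is on by default.
    @UserDefaultSketch(key: "enableWysiwygTextFormatting", defaultValue: true, storage: .standard)
    var enableWysiwygTextFormatting: Bool
}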

@@ -152,7 +152,7 @@
 UITableViewCell *cell = [super tableView:tableView cellForRowAtIndexPath:indexPath];
 // Finalize cell view customization here
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *bubbleCell = (MXKRoomBubbleTableViewCell*)cell;

@@ -105,6 +105,8 @@ typedef NS_ENUM(NSInteger, RoomBubbleCellDataTag)
 */
 @property(nonatomic) NSInteger componentIndexOfSentMessageTick;
+@property(nonatomic, strong) NSString *voiceBroadcastState;
 /**
 Indicate that both the text message layout and any additional content height are no longer
 valid and should be recomputed before presentation in a bubble cell. This could be due

@@ -186,17 +186,31 @@ NSString *const URLPreviewDidUpdateNotification = @"URLPreviewDidUpdateNotificat
 else if ([event.type isEqualToString:VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType])
 {
 VoiceBroadcastInfo *voiceBroadcastInfo = [VoiceBroadcastInfo modelFromJSON: event.content];
+// Check if the state event corresponds to the beginning of a voice broadcast
 if ([VoiceBroadcastInfo isStartedFor:voiceBroadcastInfo.state])
 {
-// This state event corresponds to the beginning of a voice broadcast
-// Check whether this is a local live broadcast to display it with the recorder view or not
-// Note: Because of race condition, the voiceBroadcastService may be running without id here (the sync response may be received before
-// the success of the event sending), in that case, we will display a recorder view by default to let the user be able to stop a potential record.
-if ([event.sender isEqualToString: self.mxSession.myUserId] &&
-[voiceBroadcastInfo.deviceId isEqualToString:self.mxSession.myDeviceId] &&
-self.mxSession.voiceBroadcastService != nil &&
-([event.eventId isEqualToString: self.mxSession.voiceBroadcastService.voiceBroadcastInfoEventId] ||
-self.mxSession.voiceBroadcastService.voiceBroadcastInfoEventId == nil))
+// Retrieve the most recent voice broadcast info.
+MXEvent *lastVoiceBroadcastInfoEvent = [roomDataSource.roomState stateEventsWithType:VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType].lastObject;
+if (event.originServerTs > lastVoiceBroadcastInfoEvent.originServerTs)
+{
+lastVoiceBroadcastInfoEvent = event;
+}
+VoiceBroadcastInfo *lastVoiceBroadcastInfo = [VoiceBroadcastInfo modelFromJSON: lastVoiceBroadcastInfoEvent.content];
+// Handle the specific case where the state event is a started voice broadcast (the voiceBroadcastId is the event id itself).
+if (!lastVoiceBroadcastInfo.voiceBroadcastId)
+{
+lastVoiceBroadcastInfo.voiceBroadcastId = lastVoiceBroadcastInfoEvent.eventId;
+}
+// Check if the voice broadcast is still alive.
+if ([lastVoiceBroadcastInfo.voiceBroadcastId isEqualToString:event.eventId] && ![VoiceBroadcastInfo isStoppedFor:lastVoiceBroadcastInfo.state])
+{
+// Check whether this broadcast is sent from the currrent session to display it with the recorder view or not.
+if ([event.stateKey isEqualToString:self.mxSession.myUserId] &&
+[voiceBroadcastInfo.deviceId isEqualToString:self.mxSession.myDeviceId])
 {
 self.tag = RoomBubbleCellDataTagVoiceBroadcastRecord;
 }
@@ -204,6 +218,14 @@ NSString *const URLPreviewDidUpdateNotification = @"URLPreviewDidUpdateNotificat
 {
 self.tag = RoomBubbleCellDataTagVoiceBroadcastPlayback;
 }
+self.voiceBroadcastState = lastVoiceBroadcastInfo.state;
+}
+else
+{
+self.tag = RoomBubbleCellDataTagVoiceBroadcastPlayback;
+self.voiceBroadcastState = VoiceBroadcastInfo.stoppedValue;
+}
 }
 else
 {
@@ -213,8 +235,9 @@ NSString *const URLPreviewDidUpdateNotification = @"URLPreviewDidUpdateNotificat
 {
 // This state event corresponds to the end of a voice broadcast
 // Force the tag of the potential cellData which corresponds to the started event to switch the display from recorder to listener
-id<MXKRoomBubbleCellDataStoring> bubbleData = [roomDataSource cellDataOfEventWithEventId:voiceBroadcastInfo.eventId];
+RoomBubbleCellData *bubbleData = [roomDataSource cellDataOfEventWithEventId:voiceBroadcastInfo.voiceBroadcastId];
 bubbleData.tag = RoomBubbleCellDataTagVoiceBroadcastPlayback;
+bubbleData.voiceBroadcastState = VoiceBroadcastInfo.stoppedValue;
 }
 }
 self.collapsable = NO;
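In short, the reworked logic above resolves the most recent voice broadcast info state event for the room (preferring the rendered event when it is newer), falls back to the event's own id when no voiceBroadcastId reference is present (a started event references itself), and only keeps the recorder tile when the broadcast is still live and was started from the current user's session; in every other case it falls back to a playback tile and records the resolved state on the cell data. A condensed Swift sketch of that decision, using simplified stand-in types rather than the project's Objective-C API:

// Condensed sketch of the decision implemented above; types and names are stand-ins.
struct BroadcastInfoSketch {
    var voiceBroadcastId: String?   // nil for a "started" event, which references itself
    var state: String               // "started" | "paused" | "resumed" | "stopped"
}

enum BubbleTagSketch { case voiceBroadcastRecord, voiceBroadcastPlayback }

func resolveVoiceBroadcastTag(eventId: String,
                              eventStateKey: String,
                              eventDeviceId: String,
                              latestInfo: BroadcastInfoSketch,
                              myUserId: String,
                              myDeviceId: String) -> (tag: BubbleTagSketch, state: String) {
    let broadcastId = latestInfo.voiceBroadcastId ?? eventId

    // The broadcast this tile belongs to is over or superseded: show a stopped playback tile.
    guard broadcastId == eventId, latestInfo.state != "stopped" else {
        return (.voiceBroadcastPlayback, "stopped")
    }

    // Still live: show the recorder only when it was started from this very session.
    let startedHere = eventStateKey == myUserId && eventDeviceId == myDeviceId
    return (startedHere ? .voiceBroadcastRecord : .voiceBroadcastPlayback, latestInfo.state)
}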

@@ -395,7 +395,7 @@ const CGFloat kTypingCellHeight = 24;
 id<RoomTimelineCellDecorator> cellDecorator = [RoomTimelineConfiguration shared].currentStyle.cellDecorator;
 // Finalize cell view customization here
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *bubbleCell = (MXKRoomBubbleTableViewCell*)cell;
 [self resetAccessibilityForCell:bubbleCell];

@@ -1859,7 +1859,7 @@
 CGFloat localPositionOfEvent = 0.0;
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell *)cell;
@@ -2303,7 +2303,7 @@
 CGFloat eventBottomPosition = eventTopPosition + cell.frame.size.height;
 // Compute accurate event positions in case of bubble with multiple components
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell *)cell;
 NSArray *bubbleComponents = roomBubbleTableViewCell.bubbleData.bubbleComponents;
@@ -2604,11 +2604,11 @@
 roomDataSource.showBubblesDateTime = !roomDataSource.showBubblesDateTime;
 MXLogDebug(@" -> Turn %@ cells date", roomDataSource.showBubblesDateTime ? @"ON" : @"OFF");
 }
-else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellTapOnAttachmentView] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellTapOnAttachmentView] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 [self showAttachmentInCell:(MXKRoomBubbleTableViewCell *)cell];
 }
-else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellLongPressOnProgressView] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellLongPressOnProgressView] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell *)cell;
@@ -2719,7 +2719,7 @@
 }
 }
 }
-else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellLongPressOnEvent] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+else if ([actionIdentifier isEqualToString:kMXKRoomBubbleCellLongPressOnEvent] && [cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 [self dismissKeyboard];
@@ -3089,7 +3089,7 @@
 return;
 }
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell *)cell;
 selectedText = roomBubbleTableViewCell.bubbleData.textMessage;
@@ -3628,7 +3628,7 @@
 // Keep here the image view used to display the attachment in the selected cell.
 // Note: Only `MXKRoomBubbleTableViewCell` and `MXKSearchTableViewCell` are supported for the moment.
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 self.openedAttachmentImageView = ((MXKRoomBubbleTableViewCell *)cell).attachmentView.imageView;
 }
@@ -3806,7 +3806,7 @@
 }];
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 // Start animation in case of download
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell *)cell;
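All of the call sites above now apply the same guard: decorate the cell only if it is an MXKRoomBubbleTableViewCell and not the new MXKRoomEmptyBubbleTableViewCell. An equivalent check written as a small Swift helper, purely for illustration (the Objective-C call sites in this change inline the test instead):

import UIKit

// Illustrative helper equivalent to the repeated Objective-C test above.
// The concrete classes are passed in so the sketch stays self-contained.
func isDecoratableBubbleCell(_ cell: UITableViewCell,
                             bubbleCellClass: AnyClass,
                             emptyBubbleCellClass: AnyClass) -> Bool {
    cell.isKind(of: bubbleCellClass) && !cell.isKind(of: emptyBubbleCellClass)
}

// A call site would then read:
// isDecoratableBubbleCell(cell,
//                         bubbleCellClass: MXKRoomBubbleTableViewCell.self,
//                         emptyBubbleCellClass: MXKRoomEmptyBubbleTableViewCell.self)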

@@ -3237,30 +3237,30 @@ static CGSize kThreadListBarButtonItemImageSize;
 {
 if (bubbleData.isPaginationFirstBubble)
 {
-cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcastWithPaginationTitle;
+cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithPaginationTitle;
 }
 else if (bubbleData.shouldHideSenderInformation)
 {
-cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcastWithoutSenderInfo;
+cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithoutSenderInfo;
 }
 else
 {
-cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcast;
+cellIdentifier = RoomTimelineCellIdentifierIncomingVoiceBroadcastPlayback;
 }
 }
 else
 {
 if (bubbleData.isPaginationFirstBubble)
 {
-cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithPaginationTitle;
+cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithPaginationTitle;
 }
 else if (bubbleData.shouldHideSenderInformation)
 {
-cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithoutSenderInfo;
+cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithoutSenderInfo;
 }
 else
 {
-cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcast;
+cellIdentifier = RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlayback;
 }
 }
 }
@@ -5115,7 +5115,9 @@ static CGSize kThreadListBarButtonItemImageSize;
 [actionItems addObject:@(ComposerCreateActionCamera)];
 }
-self.composerCreateActionListBridgePresenter = [[ComposerCreateActionListBridgePresenter alloc] initWithActions:actionItems];
+self.composerCreateActionListBridgePresenter = [[ComposerCreateActionListBridgePresenter alloc] initWithActions:actionItems
+wysiwygEnabled:RiotSettings.shared.enableWysiwygComposer
+textFormattingEnabled:RiotSettings.shared.enableWysiwygTextFormatting];
 self.composerCreateActionListBridgePresenter.delegate = self;
 [self.composerCreateActionListBridgePresenter presentFrom:self animated:YES];
 }
@@ -5268,7 +5270,7 @@ static CGSize kThreadListBarButtonItemImageSize;
 }
 }
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell*)cell;
 if (roomBubbleTableViewCell.readMarkerView)
@@ -6522,7 +6524,7 @@ static CGSize kThreadListBarButtonItemImageSize;
 if (self.roomDataSource.isLive && !self.roomDataSource.isPeeking && self.roomDataSource.showReadMarker && self.roomDataSource.room.accountData.readMarkerEventId)
 {
 UITableViewCell *cell = [self.bubblesTableView visibleCells].firstObject;
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *roomBubbleTableViewCell = (MXKRoomBubbleTableViewCell*)cell;
 // Check whether the read marker is inside the first displayed cell.
@@ -8054,6 +8056,11 @@ static CGSize kThreadListBarButtonItemImageSize;
 }];
 }
+- (void)composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting:(ComposerCreateActionListBridgePresenter *)coordinatorBridgePresenter enabled:(BOOL)enabled
+{
+[self togglePlainTextMode];
+}
 - (void)composerCreateActionListBridgePresenterDidDismissInteractively:(ComposerCreateActionListBridgePresenter *)coordinatorBridgePresenter
 {
 self.composerCreateActionListBridgePresenter = nil;

@@ -149,6 +149,11 @@ extension RoomViewController {
 }
 }
 }
+@objc func togglePlainTextMode() {
+RiotSettings.shared.enableWysiwygTextFormatting.toggle()
+wysiwygInputToolbar?.textFormattingEnabled.toggle()
+}
 }
 // MARK: - Private Helpers

@@ -131,7 +131,7 @@
 UITableViewCell *cell = [super tableView:tableView cellForRowAtIndexPath:indexPath];
 // Finalize cell view customization here
-if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class])
+if ([cell isKindOfClass:MXKRoomBubbleTableViewCell.class] && ![cell isKindOfClass:MXKRoomEmptyBubbleTableViewCell.class])
 {
 MXKRoomBubbleTableViewCell *bubbleCell = (MXKRoomBubbleTableViewCell*)cell;

@@ -170,13 +170,13 @@ typedef NS_ENUM(NSUInteger, RoomTimelineCellIdentifier) {
 // - Voice broadcast
 // -- Incoming
-RoomTimelineCellIdentifierIncomingVoiceBroadcast,
+RoomTimelineCellIdentifierIncomingVoiceBroadcastPlayback,
-RoomTimelineCellIdentifierIncomingVoiceBroadcastWithoutSenderInfo,
+RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithoutSenderInfo,
-RoomTimelineCellIdentifierIncomingVoiceBroadcastWithPaginationTitle,
+RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithPaginationTitle,
 // -- Outgoing
-RoomTimelineCellIdentifierOutgoingVoiceBroadcast,
+RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlayback,
-RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithoutSenderInfo,
+RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithoutSenderInfo,
-RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithPaginationTitle,
+RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithPaginationTitle,
 // - Voice broadcast recorder
 RoomTimelineCellIdentifierOutgoingVoiceBroadcastRecorder,

@@ -135,12 +135,12 @@
 - (void)registerVoiceBroadcastCellsForTableView:(UITableView*)tableView
 {
 // Incoming
-[tableView registerClass:VoiceBroadcastIncomingBubbleCell.class forCellReuseIdentifier:VoiceBroadcastIncomingBubbleCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackIncomingBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackIncomingBubbleCell.defaultReuseIdentifier];
-[tableView registerClass:VoiceBroadcastIncomingWithoutSenderInfoBubbleCell.class forCellReuseIdentifier:VoiceBroadcastIncomingWithoutSenderInfoBubbleCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackIncomingWithoutSenderInfoBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackIncomingWithoutSenderInfoBubbleCell.defaultReuseIdentifier];
-[tableView registerClass:VoiceBroadcastIncomingWithPaginationTitleBubbleCell.class forCellReuseIdentifier:VoiceBroadcastIncomingWithPaginationTitleBubbleCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackIncomingWithPaginationTitleBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackIncomingWithPaginationTitleBubbleCell.defaultReuseIdentifier];
 // Outgoing
-[tableView registerClass:VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell.class forCellReuseIdentifier:VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell.defaultReuseIdentifier];
-[tableView registerClass:VoiceBroadcastOutgoingWithPaginationTitleBubbleCell.class forCellReuseIdentifier:VoiceBroadcastOutgoingWithPaginationTitleBubbleCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackOutgoingWithPaginationTitleBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackOutgoingWithPaginationTitleBubbleCell.defaultReuseIdentifier];
 }
 - (void)registerVoiceBroadcastRecorderCellsForTableView:(UITableView*)tableView
@@ -311,17 +311,17 @@
 };
 }
-- (NSDictionary<NSNumber*, Class>*)voiceBroadcastCellsMapping
+- (NSDictionary<NSNumber*, Class>*)voiceBroadcastPlaybackCellsMapping
 {
 return @{
 // Incoming
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcast) : VoiceBroadcastIncomingBubbleCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlayback) : VoiceBroadcastPlaybackIncomingBubbleCell.class,
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcastWithoutSenderInfo) : VoiceBroadcastIncomingWithoutSenderInfoBubbleCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithoutSenderInfo) : VoiceBroadcastPlaybackIncomingWithoutSenderInfoBubbleCell.class,
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcastWithPaginationTitle) : VoiceBroadcastIncomingWithPaginationTitleBubbleCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithPaginationTitle) : VoiceBroadcastPlaybackIncomingWithPaginationTitleBubbleCell.class,
 // Outgoing
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcast) : VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlayback) : VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell.class,
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithoutSenderInfo) : VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithoutSenderInfo) : VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell.class,
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithPaginationTitle) : VoiceBroadcastOutgoingWithPaginationTitleBubbleCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithPaginationTitle) : VoiceBroadcastPlaybackOutgoingWithPaginationTitleBubbleCell.class,
 };
 }

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastIncomingBubbleCell: VoiceBroadcastBubbleCell, BubbleIncomingRoomCellProtocol {
+class VoiceBroadcastPlaybackIncomingBubbleCell: VoiceBroadcastPlaybackBubbleCell, BubbleIncomingRoomCellProtocol {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastIncomingWithPaginationTitleBubbleCell: VoiceBroadcastIncomingBubbleCell {
+class VoiceBroadcastPlaybackIncomingWithPaginationTitleBubbleCell: VoiceBroadcastPlaybackIncomingBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastWithoutSenderInfoPlainCell: VoiceBroadcastPlainCell {
+class VoiceBroadcastPlaybackIncomingWithoutSenderInfoBubbleCell: VoiceBroadcastPlaybackIncomingBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastOutgoingWithPaginationTitleBubbleCell: VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell {
+class VoiceBroadcastPlaybackOutgoingWithPaginationTitleBubbleCell: VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastOutgoingWithoutSenderInfoBubbleCell: VoiceBroadcastBubbleCell, BubbleOutgoingRoomCellProtocol {
+class VoiceBroadcastPlaybackOutgoingWithoutSenderInfoBubbleCell: VoiceBroadcastPlaybackBubbleCell, BubbleOutgoingRoomCellProtocol {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import UIKit
-class VoiceBroadcastBubbleCell: VoiceBroadcastPlainCell {
+class VoiceBroadcastPlaybackBubbleCell: VoiceBroadcastPlaybackPlainCell {
 // MARK: - Properties
@@ -95,7 +95,7 @@ class VoiceBroadcastBubbleCell: VoiceBroadcastPlainCell {
 }
 // MARK: - RoomCellTimestampDisplayable
-extension VoiceBroadcastBubbleCell: TimestampDisplayable {
+extension VoiceBroadcastPlaybackBubbleCell: TimestampDisplayable {
 func addTimestampView(_ timestampView: UIView) {
 guard let messageBubbleBackgroundView = self.getBubbleBackgroundView() else {

@@ -0,0 +1,37 @@
//
// Copyright 2022 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

import Foundation

class VoiceBroadcastPlaybackPlainBubbleCell: VoiceBroadcastPlaybackBubbleCell {

    override func setupViews() {
        super.setupViews()

        // TODO: VB update margins attributes
        let leftMargin: CGFloat = BubbleRoomCellLayoutConstants.incomingBubbleBackgroundMargins.left + BubbleRoomCellLayoutConstants.pollBubbleBackgroundInsets.left
        let rightMargin: CGFloat = 15 + BubbleRoomCellLayoutConstants.pollBubbleBackgroundInsets.right

        roomCellContentView?.innerContentViewLeadingConstraint.constant = leftMargin
        roomCellContentView?.innerContentViewTrailingConstraint.constant = rightMargin
    }

    override func update(theme: Theme) {
        super.update(theme: theme)

        self.bubbleBackgroundColor = theme.roomCellIncomingBubbleBackgroundColor
    }
}

@@ -0,0 +1,37 @@
//
// Copyright 2022 New Vector Ltd
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

import Foundation

class VoiceBroadcastRecorderPlainBubbleCell: VoiceBroadcastRecorderBubbleCell {

    override func setupViews() {
        super.setupViews()

        // TODO: VB update margins attributes
        let leftMargin: CGFloat = BubbleRoomCellLayoutConstants.incomingBubbleBackgroundMargins.left + BubbleRoomCellLayoutConstants.pollBubbleBackgroundInsets.left
        let rightMargin: CGFloat = 15 + BubbleRoomCellLayoutConstants.pollBubbleBackgroundInsets.right

        roomCellContentView?.innerContentViewLeadingConstraint.constant = leftMargin
        roomCellContentView?.innerContentViewTrailingConstraint.constant = rightMargin
    }

    override func update(theme: Theme) {
        super.update(theme: theme)

        self.bubbleBackgroundColor = theme.roomCellIncomingBubbleBackgroundColor
    }
}

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastPlainCell: SizableBaseRoomCell, RoomCellReactionsDisplayable, RoomCellReadMarkerDisplayable {
+class VoiceBroadcastPlaybackPlainCell: SizableBaseRoomCell, RoomCellReactionsDisplayable, RoomCellReadMarkerDisplayable {
 private var event: MXEvent?
@@ -28,7 +28,10 @@ class VoiceBroadcastPlainCell: SizableBaseRoomCell, RoomCellReactionsDisplayable
 let event = bubbleData.events.last,
 let voiceBroadcastContent = VoiceBroadcastInfo(fromJSON: event.content),
 voiceBroadcastContent.state == VoiceBroadcastInfo.State.started.rawValue,
-let controller = VoiceBroadcastPlaybackProvider.shared.buildVoiceBroadcastPlaybackVCForEvent(event, senderDisplayName: bubbleData.senderDisplayName) else {
+let controller = VoiceBroadcastPlaybackProvider.shared.buildVoiceBroadcastPlaybackVCForEvent(event,
+senderDisplayName: bubbleData.senderDisplayName,
+voiceBroadcastState: bubbleData.voiceBroadcastState)
+else {
 return
 }
@@ -54,4 +57,4 @@ class VoiceBroadcastPlainCell: SizableBaseRoomCell, RoomCellReactionsDisplayable
 }
 }
-extension VoiceBroadcastPlainCell: RoomCellThreadSummaryDisplayable {}
+extension VoiceBroadcastPlaybackPlainCell: RoomCellThreadSummaryDisplayable {}

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastWithPaginationTitlePlainCell: VoiceBroadcastPlainCell {
+class VoiceBroadcastPlaybackWithPaginationTitlePlainCell: VoiceBroadcastPlaybackPlainBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastIncomingWithoutSenderInfoBubbleCell: VoiceBroadcastIncomingBubbleCell {
+class VoiceBroadcastPlaybackWithoutSenderInfoPlainCell: VoiceBroadcastPlaybackPlainBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastRecorderWithPaginationTitlePlainCell: VoiceBroadcastRecorderPlainCell {
+class VoiceBroadcastRecorderWithPaginationTitlePlainCell: VoiceBroadcastRecorderPlainBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -16,7 +16,7 @@
 import Foundation
-class VoiceBroadcastRecorderWithoutSenderInfoPlainCell: VoiceBroadcastRecorderPlainCell {
+class VoiceBroadcastRecorderWithoutSenderInfoPlainCell: VoiceBroadcastRecorderPlainBubbleCell {
 override func setupViews() {
 super.setupViews()

@@ -56,7 +56,7 @@ NS_ASSUME_NONNULL_BEGIN
 - (NSDictionary<NSNumber*, Class>*)locationCellsMapping;
-- (NSDictionary<NSNumber*, Class>*)voiceBroadcastCellsMapping;
+- (NSDictionary<NSNumber*, Class>*)voiceBroadcastPlaybackCellsMapping;
 - (NSDictionary<NSNumber*, Class>*)voiceBroadcastRecorderCellsMapping;

@@ -276,14 +276,14 @@
 - (void)registerVoiceBroadcastCellsForTableView:(UITableView*)tableView
 {
-[tableView registerClass:VoiceBroadcastPlainCell.class forCellReuseIdentifier:VoiceBroadcastPlainCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackPlainBubbleCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackPlainBubbleCell.defaultReuseIdentifier];
-[tableView registerClass:VoiceBroadcastWithoutSenderInfoPlainCell.class forCellReuseIdentifier:VoiceBroadcastWithoutSenderInfoPlainCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackWithoutSenderInfoPlainCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackWithoutSenderInfoPlainCell.defaultReuseIdentifier];
-[tableView registerClass:VoiceBroadcastWithPaginationTitlePlainCell.class forCellReuseIdentifier:VoiceBroadcastWithPaginationTitlePlainCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastPlaybackWithPaginationTitlePlainCell.class forCellReuseIdentifier:VoiceBroadcastPlaybackWithPaginationTitlePlainCell.defaultReuseIdentifier];
 }
 - (void)registerVoiceBroadcastRecorderCellsForTableView:(UITableView*)tableView
 {
-[tableView registerClass:VoiceBroadcastRecorderPlainCell.class forCellReuseIdentifier:VoiceBroadcastRecorderPlainCell.defaultReuseIdentifier];
+[tableView registerClass:VoiceBroadcastRecorderPlainBubbleCell.class forCellReuseIdentifier:VoiceBroadcastRecorderPlainBubbleCell.defaultReuseIdentifier];
 [tableView registerClass:VoiceBroadcastRecorderWithoutSenderInfoPlainCell.class forCellReuseIdentifier:VoiceBroadcastRecorderWithoutSenderInfoPlainCell.defaultReuseIdentifier];
 [tableView registerClass:VoiceBroadcastRecorderWithPaginationTitlePlainCell.class forCellReuseIdentifier:VoiceBroadcastRecorderWithPaginationTitlePlainCell.defaultReuseIdentifier];
 }
@@ -346,8 +346,8 @@
 NSDictionary *locationCellsMapping = [self locationCellsMapping];
 [cellClasses addEntriesFromDictionary:locationCellsMapping];
-NSDictionary *voiceBroadcastCellsMapping = [self voiceBroadcastCellsMapping];
+NSDictionary *voiceBroadcastPlaybackCellsMapping = [self voiceBroadcastPlaybackCellsMapping];
-[cellClasses addEntriesFromDictionary:voiceBroadcastCellsMapping];
+[cellClasses addEntriesFromDictionary:voiceBroadcastPlaybackCellsMapping];
 NSDictionary *voiceBroadcastRecorderCellsMapping = [self voiceBroadcastRecorderCellsMapping];
 [cellClasses addEntriesFromDictionary:voiceBroadcastRecorderCellsMapping];
@@ -574,17 +574,17 @@
 };
 }
-- (NSDictionary<NSNumber*, Class>*)voiceBroadcastCellsMapping
+- (NSDictionary<NSNumber*, Class>*)voiceBroadcastPlaybackCellsMapping
 {
 return @{
 // Incoming
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcast) : VoiceBroadcastPlainCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlayback) : VoiceBroadcastPlaybackPlainBubbleCell.class,
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcastWithoutSenderInfo) : VoiceBroadcastWithoutSenderInfoPlainCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithoutSenderInfo) : VoiceBroadcastPlaybackWithoutSenderInfoPlainCell.class,
-@(RoomTimelineCellIdentifierIncomingVoiceBroadcastWithPaginationTitle) : VoiceBroadcastWithPaginationTitlePlainCell.class,
+@(RoomTimelineCellIdentifierIncomingVoiceBroadcastPlaybackWithPaginationTitle) : VoiceBroadcastPlaybackWithPaginationTitlePlainCell.class,
 // Outoing
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcast) : VoiceBroadcastPlainCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlayback) : VoiceBroadcastPlaybackPlainBubbleCell.class,
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithoutSenderInfo) : VoiceBroadcastWithoutSenderInfoPlainCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithoutSenderInfo) : VoiceBroadcastPlaybackWithoutSenderInfoPlainCell.class,
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastWithPaginationTitle) : VoiceBroadcastWithPaginationTitlePlainCell.class
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastPlaybackWithPaginationTitle) : VoiceBroadcastPlaybackWithPaginationTitlePlainCell.class
 };
 }
@@ -592,7 +592,7 @@
 {
 return @{
 // Outoing
-@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastRecorder) : VoiceBroadcastRecorderPlainCell.class,
+@(RoomTimelineCellIdentifierOutgoingVoiceBroadcastRecorder) : VoiceBroadcastRecorderPlainBubbleCell.class,
 @(RoomTimelineCellIdentifierOutgoingVoiceBroadcastRecorderWithoutSenderInfo) : VoiceBroadcastRecorderWithoutSenderInfoPlainCell.class,
 @(RoomTimelineCellIdentifierOutgoingVoiceBroadcastRecorderWithPaginationTitle) : VoiceBroadcastRecorderWithPaginationTitlePlainCell.class
 };

@@ -66,6 +66,7 @@ class WysiwygInputToolbarView: MXKRoomInputToolbarView, NibLoadable, HtmlRoomInp
 viewModel.callback = { [weak self] result in
 self?.handleViewModelResult(result)
 }
+wysiwygViewModel.plainTextMode = !RiotSettings.shared.enableWysiwygTextFormatting
 inputAccessoryViewForKeyboard = UIView(frame: .zero)
@@ -99,6 +100,7 @@ class WysiwygInputToolbarView: MXKRoomInputToolbarView, NibLoadable, HtmlRoomInp
 subView.trailingAnchor.constraint(equalTo: self.trailingAnchor),
 subView.bottomAnchor.constraint(equalTo: self.bottomAnchor)
 ])
 cancellables = [
 hostingViewController.heightPublisher
 .removeDuplicates()
@@ -135,7 +137,8 @@ class WysiwygInputToolbarView: MXKRoomInputToolbarView, NibLoadable, HtmlRoomInp
 }
 private func sendWysiwygMessage(content: WysiwygComposerContent) {
-delegate?.roomInputToolbarView?(self, sendFormattedTextMessage: content.html, withRawText: content.plainText)
+let html = content.html.isEmpty ? content.plainText : content.html
+delegate?.roomInputToolbarView?(self, sendFormattedTextMessage: html, withRawText: content.plainText)
 }
 private func showSendMediaActions() {
@@ -213,6 +216,20 @@ class WysiwygInputToolbarView: MXKRoomInputToolbarView, NibLoadable, HtmlRoomInp
 }
 }
+/// Whether text formatting is currently enabled in the composer.
+var textFormattingEnabled: Bool {
+get {
+self.viewModel.textFormattingEnabled
+}
+set {
+self.viewModel.textFormattingEnabled = newValue
+self.wysiwygViewModel.plainTextMode = !newValue
+if !newValue {
+self.wysiwygViewModel.maximised = false
+}
+}
+}
 /// Add the voice message toolbar to the composer
 /// - Parameter voiceMessageToolbarView: the voice message toolbar UIView
 func setVoiceMessageToolbarView(_ voiceMessageToolbarView: UIView!) {
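Taken together, the rich text editor changes wire up a formatting toggle: the Labs flag enableWysiwygTextFormatting persists the preference, togglePlainTextMode flips both the flag and the toolbar, the toolbar's textFormattingEnabled setter switches the composer view model into plain text mode (and un-maximises it when formatting is turned off), and sendWysiwygMessage now falls back to the plain text when no HTML was produced. A condensed Swift sketch of that flow, using stand-in types rather than the real RiotSettings / WysiwygComposer API:

// Stand-in types sketching the toggle flow described above.
final class ComposerViewModelSketch {
    var plainTextMode = false
    var maximised = false
}

final class ComposerToolbarSketch {
    let wysiwygViewModel = ComposerViewModelSketch()

    var textFormattingEnabled = true {
        didSet {
            wysiwygViewModel.plainTextMode = !textFormattingEnabled
            if !textFormattingEnabled {
                wysiwygViewModel.maximised = false
            }
        }
    }

    // Mirrors the new fallback in sendWysiwygMessage: with formatting off the
    // editor produces no HTML, so the plain text is sent as the formatted body.
    func formattedBody(html: String, plainText: String) -> String {
        html.isEmpty ? plainText : html
    }
}

// Toggling from the composer action list flips the persisted flag and the toolbar state.
var enableWysiwygTextFormattingSketch = true
let toolbarSketch = ComposerToolbarSketch()

func togglePlainTextModeSketch() {
    enableWysiwygTextFormattingSketch.toggle()
    toolbarSketch.textFormattingEnabled.toggle()
}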

@@ -110,7 +110,7 @@ public class VoiceBroadcastAggregator {
 guard let event = roomState?.stateEvents(with: .custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType))?.last,
 event.stateKey == self.voiceBroadcastSenderId,
 let voiceBroadcastInfo = VoiceBroadcastInfo(fromJSON: event.content),
-(event.eventId == self.voiceBroadcastStartEventId || voiceBroadcastInfo.eventId == self.voiceBroadcastStartEventId),
+(event.eventId == self.voiceBroadcastStartEventId || voiceBroadcastInfo.voiceBroadcastId == self.voiceBroadcastStartEventId),
 let state = VoiceBroadcastInfo.State(rawValue: voiceBroadcastInfo.state) else {
 return
 }

@@ -32,15 +32,12 @@ NS_ASSUME_NONNULL_BEGIN
 @property (nonatomic) NSInteger chunkLength;
 /// The event id of the started voice broadcast info state event.
-@property (nonatomic, strong, nullable) NSString* eventId;
+@property (nonatomic, strong, nullable) NSString* voiceBroadcastId;
-/// The event used to build the MXBeaconInfo.
-@property (nonatomic, readonly, nullable) MXEvent *originalEvent;
 - (instancetype)initWithDeviceId:(NSString *)deviceId
 state:(NSString *)state
 chunkLength:(NSInteger)chunkLength
-eventId:(NSString *)eventId;
+voiceBroadcastId:(NSString *)voiceBroadcastId;
 @end


@ -22,14 +22,14 @@
- (instancetype)initWithDeviceId:(NSString *)deviceId - (instancetype)initWithDeviceId:(NSString *)deviceId
state:(NSString *)state state:(NSString *)state
chunkLength:(NSInteger)chunkLength chunkLength:(NSInteger)chunkLength
eventId:(NSString *)eventId voiceBroadcastId:(NSString *)voiceBroadcastId
{ {
if (self = [super init]) if (self = [super init])
{ {
_deviceId = deviceId; _deviceId = deviceId;
_state = state; _state = state;
_chunkLength = chunkLength; _chunkLength = chunkLength;
_eventId = eventId; _voiceBroadcastId = voiceBroadcastId;
} }
return self; return self;
@ -55,7 +55,7 @@
MXJSONModelSetInteger(chunkLength, JSONDictionary[VoiceBroadcastSettings.voiceBroadcastContentKeyChunkLength]); MXJSONModelSetInteger(chunkLength, JSONDictionary[VoiceBroadcastSettings.voiceBroadcastContentKeyChunkLength]);
} }
NSString *eventId; NSString *voiceBroadcastId;
if (JSONDictionary[kMXEventRelationRelatesToKey]) { if (JSONDictionary[kMXEventRelationRelatesToKey]) {
MXEventContentRelatesTo *relatesTo; MXEventContentRelatesTo *relatesTo;
@ -63,11 +63,11 @@
if (relatesTo && [relatesTo.relationType isEqualToString:MXEventRelationTypeReference]) if (relatesTo && [relatesTo.relationType isEqualToString:MXEventRelationTypeReference])
{ {
eventId = relatesTo.eventId; voiceBroadcastId = relatesTo.eventId;
} }
} }
return [[VoiceBroadcastInfo alloc] initWithDeviceId:deviceId state:state chunkLength:chunkLength eventId:eventId]; return [[VoiceBroadcastInfo alloc] initWithDeviceId:deviceId state:state chunkLength:chunkLength voiceBroadcastId:voiceBroadcastId];
} }
- (NSDictionary *)JSONDictionary - (NSDictionary *)JSONDictionary
@ -78,8 +78,8 @@
JSONDictionary[VoiceBroadcastSettings.voiceBroadcastContentKeyState] = self.state; JSONDictionary[VoiceBroadcastSettings.voiceBroadcastContentKeyState] = self.state;
if (_eventId) { if (_voiceBroadcastId) {
MXEventContentRelatesTo *relatesTo = [[MXEventContentRelatesTo alloc] initWithRelationType:MXEventRelationTypeReference eventId:_eventId]; MXEventContentRelatesTo *relatesTo = [[MXEventContentRelatesTo alloc] initWithRelationType:MXEventRelationTypeReference eventId:_voiceBroadcastId];
JSONDictionary[kMXEventRelationRelatesToKey] = relatesTo.JSONDictionary; JSONDictionary[kMXEventRelationRelatesToKey] = relatesTo.JSONDictionary;
} else { } else {
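After the rename, the identifier stored under the reference relation is consistently called voiceBroadcastId. As a rough illustration of the content this serialises to for a non-started state (the relation field names come from the standard Matrix reference relation; the state key name and event id are placeholders, not taken from this diff):

let voiceBroadcastId = "$start-event-id"   // id of the started voice broadcast info state event
let content: [String: Any] = [
    "state": "paused",                     // key name assumed for illustration
    "m.relates_to": [
        "rel_type": "m.reference",
        "event_id": voiceBroadcastId
    ]
]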


@ -35,4 +35,20 @@ extension VoiceBroadcastInfo {
@objc static func isStopped(for name: String) -> Bool { @objc static func isStopped(for name: String) -> Bool {
return name == State.stopped.rawValue return name == State.stopped.rawValue
} }
@objc static func startedValue() -> String {
return State.started.rawValue
}
@objc static func pausedValue() -> String {
return State.paused.rawValue
}
@objc static func resumedValue() -> String {
return State.resumed.rawValue
}
@objc static func stoppedValue() -> String {
return State.stopped.rawValue
}
} }


@ -23,15 +23,18 @@ public class VoiceBroadcastService: NSObject {
// MARK: - Properties // MARK: - Properties
public private(set) var voiceBroadcastInfoEventId: String?
public let room: MXRoom public let room: MXRoom
public private(set) var voiceBroadcastId: String?
public private(set) var state: VoiceBroadcastInfo.State public private(set) var state: VoiceBroadcastInfo.State
// Mechanism to process one call of sendVoiceBroadcastInfo() at a time
private let asyncTaskQueue: MXAsyncTaskQueue
// MARK: - Setup // MARK: - Setup
public init(room: MXRoom, state: VoiceBroadcastInfo.State) { public init(room: MXRoom, state: VoiceBroadcastInfo.State) {
self.room = room self.room = room
self.state = state self.state = state
self.asyncTaskQueue = MXAsyncTaskQueue(label: "VoiceBroadcastServiceQueueEventSerialQueue-" + MXTools.generateSecret())
} }
// MARK: - Constants // MARK: - Constants
@ -43,14 +46,13 @@ public class VoiceBroadcastService: NSObject {
/// Start a voice broadcast. /// Start a voice broadcast.
/// - Parameters: /// - Parameters:
/// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success. /// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success.
/// - Returns: a `MXHTTPOperation` instance. func startVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) {
func startVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) -> MXHTTPOperation? { sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.started) { [weak self] response in
return sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.started) { [weak self] response in
guard let self = self else { return } guard let self = self else { return }
switch response { switch response {
case .success((let eventIdResponse)): case .success((let eventIdResponse)):
self.voiceBroadcastInfoEventId = eventIdResponse self.voiceBroadcastId = eventIdResponse
completion(.success(eventIdResponse)) completion(.success(eventIdResponse))
case .failure(let error): case .failure(let error):
completion(.failure(error)) completion(.failure(error))
@ -61,25 +63,22 @@ public class VoiceBroadcastService: NSObject {
/// Pause a voice broadcast. /// Pause a voice broadcast.
/// - Parameters: /// - Parameters:
/// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success. /// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success.
/// - Returns: a `MXHTTPOperation` instance. func pauseVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) {
func pauseVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) -> MXHTTPOperation? { sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.paused, completion: completion)
return sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.paused, completion: completion)
} }
/// Resume a voice broadcast. /// Resume a voice broadcast.
/// - Parameters: /// - Parameters:
/// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success. /// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success.
/// - Returns: a `MXHTTPOperation` instance. func resumeVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) {
func resumeVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) -> MXHTTPOperation? { sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.resumed, completion: completion)
return sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.resumed, completion: completion)
} }
/// Stop a voice broadcast. /// Stop a voice broadcast.
/// - Parameters: /// - Parameters:
/// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success. /// - completion: A closure called when the operation completes. Provides the event id of the event generated on the home server on success.
/// - Returns: a `MXHTTPOperation` instance. func stopVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) {
func stopVoiceBroadcast(completion: @escaping (MXResponse<String?>) -> Void) -> MXHTTPOperation? { sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.stopped, completion: completion)
return sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State.stopped, completion: completion)
} }
func getState() -> String { func getState() -> String {
@ -104,19 +103,17 @@ public class VoiceBroadcastService: NSObject {
func sendChunkOfVoiceBroadcast(audioFileLocalURL: URL, func sendChunkOfVoiceBroadcast(audioFileLocalURL: URL,
mimeType: String?, mimeType: String?,
duration: UInt, duration: UInt,
samples: [Float]?,
sequence: UInt, sequence: UInt,
success: @escaping ((String?) -> Void), success: @escaping ((String?) -> Void),
failure: @escaping ((Error?) -> Void)) { failure: @escaping ((Error?) -> Void)) {
guard let voiceBroadcastInfoEventId = self.voiceBroadcastInfoEventId else { guard let voiceBroadcastId = self.voiceBroadcastId else {
return failure(VoiceBroadcastServiceError.notStarted) return failure(VoiceBroadcastServiceError.notStarted)
} }
self.room.sendChunkOfVoiceBroadcast(localURL: audioFileLocalURL, self.room.sendChunkOfVoiceBroadcast(localURL: audioFileLocalURL,
voiceBroadcastInfoEventId: voiceBroadcastInfoEventId, voiceBroadcastId: voiceBroadcastId,
mimeType: mimeType, mimeType: mimeType,
duration: duration, duration: duration,
samples: samples,
sequence: sequence, sequence: sequence,
success: success, success: success,
failure: failure) failure: failure)
@ -124,10 +121,31 @@ public class VoiceBroadcastService: NSObject {
// MARK: - Private // MARK: - Private
private func sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State, completion: @escaping (MXResponse<String?>) -> Void) -> MXHTTPOperation? { private func allowedStates(from state: VoiceBroadcastInfo.State) -> [VoiceBroadcastInfo.State] {
switch state {
case .started:
return [.paused, .stopped]
case .paused:
return [.resumed, .stopped]
case .resumed:
return [.paused, .stopped]
case .stopped:
return [.started]
}
}
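The new allowedStates(from:) table gives the service a small state machine: a started broadcast may be paused or stopped, paused and resumed may swap or stop, and a stopped broadcast can only be started again. A self-contained sketch of the same transitions (stand-in enum, not the SDK's VoiceBroadcastInfo.State):

enum BroadcastState { case started, paused, resumed, stopped }

func allowedNextStates(from state: BroadcastState) -> [BroadcastState] {
    switch state {
    case .started: return [.paused, .stopped]
    case .paused:  return [.resumed, .stopped]
    case .resumed: return [.paused, .stopped]
    case .stopped: return [.started]
    }
}

// For example, resuming is only valid from the paused state:
assert(allowedNextStates(from: .paused).contains(.resumed))
assert(!allowedNextStates(from: .started).contains(.resumed))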
private func sendVoiceBroadcastInfo(state: VoiceBroadcastInfo.State, completion: @escaping (MXResponse<String?>) -> Void) {
guard let userId = self.room.mxSession.myUserId else { guard let userId = self.room.mxSession.myUserId else {
completion(.failure(VoiceBroadcastServiceError.missingUserId)) completion(.failure(VoiceBroadcastServiceError.missingUserId))
return nil return
}
asyncTaskQueue.async { (taskCompleted) in
guard self.allowedStates(from: self.state).contains(state) else {
MXLog.warning("[VoiceBroadcastService] sendVoiceBroadcastInfo: unexpected state change \(self.state) -> \(state)")
completion(.failure(VoiceBroadcastServiceError.unexpectedState))
taskCompleted()
return
} }
let stateKey = userId let stateKey = userId
@ -139,22 +157,24 @@ public class VoiceBroadcastService: NSObject {
voiceBroadcastInfo.state = state.rawValue voiceBroadcastInfo.state = state.rawValue
if state != VoiceBroadcastInfo.State.started { if state != VoiceBroadcastInfo.State.started {
guard let voiceBroadcastInfoEventId = self.voiceBroadcastInfoEventId else { guard let voiceBroadcastId = self.voiceBroadcastId else {
completion(.failure(VoiceBroadcastServiceError.notStarted)) completion(.failure(VoiceBroadcastServiceError.notStarted))
return nil taskCompleted()
return
} }
voiceBroadcastInfo.eventId = voiceBroadcastInfoEventId voiceBroadcastInfo.voiceBroadcastId = voiceBroadcastId
} else { } else {
voiceBroadcastInfo.chunkLength = BuildSettings.voiceBroadcastChunkLength voiceBroadcastInfo.chunkLength = BuildSettings.voiceBroadcastChunkLength
} }
guard let stateEventContent = voiceBroadcastInfo.jsonDictionary() as? [String: Any] else { guard let stateEventContent = voiceBroadcastInfo.jsonDictionary() as? [String: Any] else {
completion(.failure(VoiceBroadcastServiceError.unknown)) completion(.failure(VoiceBroadcastServiceError.unknown))
return nil taskCompleted()
return
} }
return self.room.sendStateEvent(.custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType), self.room.sendStateEvent(.custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType),
content: stateEventContent, stateKey: stateKey) { [weak self] response in content: stateEventContent, stateKey: stateKey) { [weak self] response in
guard let self = self else { return } guard let self = self else { return }
@ -165,6 +185,8 @@ public class VoiceBroadcastService: NSObject {
case .failure(let error): case .failure(let error):
completion(.failure(error)) completion(.failure(error))
} }
taskCompleted()
}
} }
} }
} }
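MXAsyncTaskQueue comes from the Matrix iOS SDK; routing every sendVoiceBroadcastInfo() call through it means the next state change is not attempted until the previous request has called taskCompleted(). A self-contained GCD sketch of that behaviour (this shows the idea only, not the SDK implementation):

import Foundation

let serialQueue = DispatchQueue(label: "VoiceBroadcastServiceQueueEventSerialQueue")

func enqueueStateChange(to state: String) {
    serialQueue.async {
        let taskCompleted = DispatchSemaphore(value: 0)
        // Stand-in for room.sendStateEvent(...): asynchronous work that reports
        // back once the homeserver has answered.
        DispatchQueue.global().asyncAfter(deadline: .now() + 0.1) {
            print("state event sent: \(state)")
            taskCompleted.signal()
        }
        taskCompleted.wait()   // the next enqueued state change waits for this one
    }
}

enqueueStateChange(to: "started")
enqueueStateChange(to: "paused")   // runs only after "started" has completed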
@ -176,10 +198,8 @@ extension VoiceBroadcastService {
/// - Parameters: /// - Parameters:
/// - success: A closure called when the operation is complete. /// - success: A closure called when the operation is complete.
/// - failure: A closure called when the operation fails. /// - failure: A closure called when the operation fails.
/// - Returns: a `MXHTTPOperation` instance. @objc public func startVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) {
@discardableResult self.startVoiceBroadcast { response in
@objc public func startVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) -> MXHTTPOperation? {
return self.startVoiceBroadcast { response in
switch response { switch response {
case .success(let object): case .success(let object):
success(object) success(object)
@ -193,10 +213,8 @@ extension VoiceBroadcastService {
/// - Parameters: /// - Parameters:
/// - success: A closure called when the operation is complete. /// - success: A closure called when the operation is complete.
/// - failure: A closure called when the operation fails. /// - failure: A closure called when the operation fails.
/// - Returns: a `MXHTTPOperation` instance. @objc public func pauseVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) {
@discardableResult self.pauseVoiceBroadcast { response in
@objc public func pauseVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) -> MXHTTPOperation? {
return self.pauseVoiceBroadcast { response in
switch response { switch response {
case .success(let object): case .success(let object):
success(object) success(object)
@ -210,10 +228,8 @@ extension VoiceBroadcastService {
/// - Parameters: /// - Parameters:
/// - success: A closure called when the operation is complete. /// - success: A closure called when the operation is complete.
/// - failure: A closure called when the operation fails. /// - failure: A closure called when the operation fails.
/// - Returns: a `MXHTTPOperation` instance. @objc public func resumeVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) {
@discardableResult self.resumeVoiceBroadcast { response in
@objc public func resumeVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) -> MXHTTPOperation? {
return self.resumeVoiceBroadcast { response in
switch response { switch response {
case .success(let object): case .success(let object):
success(object) success(object)
@ -227,10 +243,8 @@ extension VoiceBroadcastService {
/// - Parameters: /// - Parameters:
/// - success: A closure called when the operation is complete. /// - success: A closure called when the operation is complete.
/// - failure: A closure called when the operation fails. /// - failure: A closure called when the operation fails.
/// - Returns: a `MXHTTPOperation` instance. @objc public func stopVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) {
@discardableResult self.stopVoiceBroadcast { response in
@objc public func stopVoiceBroadcast(success: @escaping (String?) -> Void, failure: @escaping (Error) -> Void) -> MXHTTPOperation? {
return self.stopVoiceBroadcast { response in
switch response { switch response {
case .success(let object): case .success(let object):
success(object) success(object)
@ -247,7 +261,7 @@ extension MXRoom {
/// Send a voice broadcast to the room. /// Send a voice broadcast to the room.
/// - Parameters: /// - Parameters:
/// - localURL: the local filesystem path of the file to send. /// - localURL: the local filesystem path of the file to send.
/// - voiceBroadcastInfoEventId: The id of the voice broadcast info event. /// - voiceBroadcastId: The event id of the started voice broadcast info state event
/// - mimeType: (optional) the mime type of the file. Defaults to `audio/ogg`. /// - mimeType: (optional) the mime type of the file. Defaults to `audio/ogg`.
/// - duration: the length of the voice message in milliseconds /// - duration: the length of the voice message in milliseconds
/// - samples: an array of floating point values normalized to [0, 1] /// - samples: an array of floating point values normalized to [0, 1]
@ -257,19 +271,15 @@ extension MXRoom {
/// - failure: A closure called when the operation fails. /// - failure: A closure called when the operation fails.
/// - Returns: a `MXHTTPOperation` instance. /// - Returns: a `MXHTTPOperation` instance.
@nonobjc @discardableResult func sendChunkOfVoiceBroadcast(localURL: URL, @nonobjc @discardableResult func sendChunkOfVoiceBroadcast(localURL: URL,
voiceBroadcastInfoEventId: String, voiceBroadcastId: String,
mimeType: String?, mimeType: String?,
duration: UInt, duration: UInt,
samples: [Float]?,
threadId: String? = nil, threadId: String? = nil,
sequence: UInt, sequence: UInt,
success: @escaping ((String?) -> Void), success: @escaping ((String?) -> Void),
failure: @escaping ((Error?) -> Void)) -> MXHTTPOperation? { failure: @escaping ((Error?) -> Void)) -> MXHTTPOperation? {
let boxedSamples = samples?.compactMap { NSNumber(value: $0) }
guard let relatesTo = MXEventContentRelatesTo(relationType: MXEventRelationTypeReference, guard let relatesTo = MXEventContentRelatesTo(relationType: MXEventRelationTypeReference,
eventId: voiceBroadcastInfoEventId).jsonDictionary() as? [String: Any] else { eventId: voiceBroadcastId).jsonDictionary() as? [String: Any] else {
failure(VoiceBroadcastServiceError.unknown) failure(VoiceBroadcastServiceError.unknown)
return nil return nil
} }
@ -281,7 +291,7 @@ extension MXRoom {
VoiceBroadcastSettings.voiceBroadcastContentKeyChunkType: sequenceValue], VoiceBroadcastSettings.voiceBroadcastContentKeyChunkType: sequenceValue],
mimeType: mimeType, mimeType: mimeType,
duration: duration, duration: duration,
samples: boxedSamples, samples: nil,
threadId: threadId, threadId: threadId,
localEcho: nil, localEcho: nil,
success: success, success: success,
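A hypothetical call site for the extension above, assuming a file that already imports MatrixSDK and an existing room: MXRoom; the file URL, event id and duration are placeholders, and threadId is omitted because it defaults to nil:

let chunkURL = URL(fileURLWithPath: "/tmp/VoiceBroadcastChunk-1.m4a")
room.sendChunkOfVoiceBroadcast(localURL: chunkURL,
                               voiceBroadcastId: "$start-event-id",
                               mimeType: "audio/mp4",
                               duration: 30_000,   // milliseconds
                               sequence: 1,
                               success: { eventId in
                                   MXLog.debug("[Example] Voice broadcast chunk sent as \(eventId ?? "<unknown>")")
                               },
                               failure: { error in
                                   MXLog.error("[Example] Failed to send voice broadcast chunk", context: error)
                               })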


@ -21,6 +21,7 @@ public enum VoiceBroadcastServiceError: Int, Error {
case missingUserId case missingUserId
case roomNotFound case roomNotFound
case notStarted case notStarted
case unexpectedState
case unknown case unknown
} }


@ -273,7 +273,8 @@ static NSString *const kEventFormatterTimeFormat = @"HH:mm";
return [self renderString:displayText forEvent:event]; return [self renderString:displayText forEvent:event];
} }
} else if ([event.type isEqualToString:VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType]) { } else if ([event.type isEqualToString:VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType]) {
MXLogDebug(@"VB incoming build string") // do not show voice broadcast info in the timeline
return nil;
} }
} }


@ -18,6 +18,7 @@ import Foundation
@objc protocol ComposerCreateActionListBridgePresenterDelegate { @objc protocol ComposerCreateActionListBridgePresenterDelegate {
func composerCreateActionListBridgePresenterDelegateDidComplete(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter, action: ComposerCreateAction) func composerCreateActionListBridgePresenterDelegateDidComplete(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter, action: ComposerCreateAction)
func composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter, enabled: Bool)
func composerCreateActionListBridgePresenterDidDismissInteractively(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter) func composerCreateActionListBridgePresenterDidDismissInteractively(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter)
} }
@ -34,6 +35,8 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// MARK: Private // MARK: Private
private let actions: [ComposerCreateAction] private let actions: [ComposerCreateAction]
private let wysiwygEnabled: Bool
private let textFormattingEnabled: Bool
private var coordinator: ComposerCreateActionListCoordinator? private var coordinator: ComposerCreateActionListCoordinator?
// MARK: Public // MARK: Public
@ -42,10 +45,12 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// MARK: - Setup // MARK: - Setup
init(actions: [Int]) { init(actions: [Int], wysiwygEnabled: Bool, textFormattingEnabled: Bool) {
self.actions = actions.compactMap { self.actions = actions.compactMap {
ComposerCreateAction(rawValue: $0) ComposerCreateAction(rawValue: $0)
} }
self.wysiwygEnabled = wysiwygEnabled
self.textFormattingEnabled = textFormattingEnabled
super.init() super.init()
} }
@ -57,12 +62,16 @@ final class ComposerCreateActionListBridgePresenter: NSObject {
// } // }
func present(from viewController: UIViewController, animated: Bool) { func present(from viewController: UIViewController, animated: Bool) {
let composerCreateActionListCoordinator = ComposerCreateActionListCoordinator(actions: actions) let composerCreateActionListCoordinator = ComposerCreateActionListCoordinator(actions: actions,
wysiwygEnabled: wysiwygEnabled,
textFormattingEnabled: textFormattingEnabled)
composerCreateActionListCoordinator.callback = { [weak self] action in composerCreateActionListCoordinator.callback = { [weak self] action in
guard let self = self else { return } guard let self = self else { return }
switch action { switch action {
case .done(let composeAction): case .done(let composeAction):
self.delegate?.composerCreateActionListBridgePresenterDelegateDidComplete(self, action: composeAction) self.delegate?.composerCreateActionListBridgePresenterDelegateDidComplete(self, action: composeAction)
case .toggleTextFormatting(let enabled):
self.delegate?.composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting(self, enabled: enabled)
case .cancel: case .cancel:
self.delegate?.composerCreateActionListBridgePresenterDidDismissInteractively(self) self.delegate?.composerCreateActionListBridgePresenterDidDismissInteractively(self)
} }
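A hypothetical consumer of the new toggle callback (the class below is an illustrative stand-in, not the real room screen wiring; a real delegate would typically forward the flag to the textFormattingEnabled property added to the toolbar above):

final class ComposerMenuHandler: NSObject, ComposerCreateActionListBridgePresenterDelegate {
    var onToggleTextFormatting: ((Bool) -> Void)?

    func composerCreateActionListBridgePresenterDelegateDidComplete(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter,
                                                                    action: ComposerCreateAction) {
        // Handle the selected create action (camera, polls, location, ...) here.
    }

    func composerCreateActionListBridgePresenterDelegateDidToggleTextFormatting(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter,
                                                                                enabled: Bool) {
        onToggleTextFormatting?(enabled)   // e.g. toolbar.textFormattingEnabled = enabled
    }

    func composerCreateActionListBridgePresenterDidDismissInteractively(_ coordinatorBridgePresenter: ComposerCreateActionListBridgePresenter) {
        // Dismiss the bottom sheet presenter here.
    }
}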


@ -19,6 +19,7 @@ import SwiftUI
/// Actions returned by the coordinator callback /// Actions returned by the coordinator callback
enum ComposerCreateActionListCoordinatorAction { enum ComposerCreateActionListCoordinatorAction {
case done(ComposerCreateAction) case done(ComposerCreateAction)
case toggleTextFormatting(Bool)
case cancel case cancel
} }
@ -39,8 +40,11 @@ final class ComposerCreateActionListCoordinator: NSObject, Coordinator, Presenta
// MARK: - Setup // MARK: - Setup
init(actions: [ComposerCreateAction]) { init(actions: [ComposerCreateAction], wysiwygEnabled: Bool, textFormattingEnabled: Bool) {
viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: actions)) viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(
actions: actions,
wysiwygEnabled: wysiwygEnabled,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: textFormattingEnabled)))
view = ComposerCreateActionList(viewModel: viewModel.context) view = ComposerCreateActionList(viewModel: viewModel.context)
let hostingVC = VectorHostingController(rootView: view) let hostingVC = VectorHostingController(rootView: view)
hostingVC.bottomSheetPreferences = VectorHostingBottomSheetPreferences( hostingVC.bottomSheetPreferences = VectorHostingBottomSheetPreferences(
@ -61,6 +65,8 @@ final class ComposerCreateActionListCoordinator: NSObject, Coordinator, Presenta
switch result { switch result {
case .done(let action): case .done(let action):
self.callback?(.done(action)) self.callback?(.done(action))
case .toggleTextFormatting(let enabled):
self.callback?(.toggleTextFormatting(enabled))
} }
} }
} }


@ -33,7 +33,10 @@ enum MockComposerCreateActionListScreenState: MockScreenState, CaseIterable {
case .fullList: case .fullList:
actions = ComposerCreateAction.allCases actions = ComposerCreateAction.allCases
} }
let viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: actions)) let viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(
actions: actions,
wysiwygEnabled: true,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: true)))
return ( return (
[viewModel], [viewModel],


@ -21,11 +21,15 @@ import Foundation
enum ComposerCreateActionListViewAction { enum ComposerCreateActionListViewAction {
// The user selected an action // The user selected an action
case selectAction(ComposerCreateAction) case selectAction(ComposerCreateAction)
// The user toggled the text formatting action
case toggleTextFormatting(Bool)
} }
enum ComposerCreateActionListViewModelResult: Equatable { enum ComposerCreateActionListViewModelResult: Equatable {
// The user selected an action and is done with the screen // The user selected an action and is done with the screen
case done(ComposerCreateAction) case done(ComposerCreateAction)
// The user toggled the text formatting setting but might not be done with the screen
case toggleTextFormatting(Bool)
} }
// MARK: View // MARK: View
@ -33,6 +37,13 @@ enum ComposerCreateActionListViewModelResult: Equatable {
struct ComposerCreateActionListViewState: BindableState { struct ComposerCreateActionListViewState: BindableState {
/// The list of composer create actions to display to the user /// The list of composer create actions to display to the user
let actions: [ComposerCreateAction] let actions: [ComposerCreateAction]
let wysiwygEnabled: Bool
var bindings: ComposerCreateActionListBindings
}
struct ComposerCreateActionListBindings {
var textFormattingEnabled: Bool
} }
@objc enum ComposerCreateAction: Int { @objc enum ComposerCreateAction: Int {


@ -23,7 +23,13 @@ class ComposerCreateActionListTests: XCTestCase {
var context: ComposerCreateActionListViewModel.Context! var context: ComposerCreateActionListViewModel.Context!
override func setUpWithError() throws { override func setUpWithError() throws {
viewModel = ComposerCreateActionListViewModel(initialViewState: ComposerCreateActionListViewState(actions: ComposerCreateAction.allCases)) viewModel = ComposerCreateActionListViewModel(
initialViewState: ComposerCreateActionListViewState(
actions: ComposerCreateAction.allCases,
wysiwygEnabled: true,
bindings: ComposerCreateActionListBindings(textFormattingEnabled: true)
)
)
context = viewModel.context context = viewModel.context
} }


@ -23,6 +23,12 @@ struct ComposerCreateActionList: View {
@Environment(\.theme) private var theme: ThemeSwiftUI @Environment(\.theme) private var theme: ThemeSwiftUI
private var textFormattingIcon: String {
viewModel.textFormattingEnabled
? Asset.Images.actionFormattingEnabled.name
: Asset.Images.actionFormattingDisabled.name
}
// MARK: Public // MARK: Public
@ObservedObject var viewModel: ComposerCreateActionListViewModel.Context @ObservedObject var viewModel: ComposerCreateActionListViewModel.Context
@ -48,6 +54,29 @@ struct ComposerCreateActionList: View {
.padding(.horizontal, 16) .padding(.horizontal, 16)
.padding(.vertical, 12) .padding(.vertical, 12)
} }
if viewModel.viewState.wysiwygEnabled {
SeparatorLine()
HStack(spacing: 16) {
Image(textFormattingIcon)
.renderingMode(.template)
.foregroundColor(theme.colors.accent)
Text(VectorL10n.wysiwygComposerStartActionTextFormatting)
.foregroundColor(theme.colors.primaryContent)
.font(theme.fonts.body)
.accessibilityIdentifier("textFormatting")
Spacer()
Toggle("", isOn: $viewModel.textFormattingEnabled)
.toggleStyle(ComposerToggleActionStyle())
.labelsHidden()
.onChange(of: viewModel.textFormattingEnabled) { isOn in
viewModel.send(viewAction: .toggleTextFormatting(isOn))
}
}
.contentShape(Rectangle())
.padding(.horizontal, 16)
.padding(.vertical, 12)
}
} }
.padding(.top, 8) .padding(.top, 8)
Spacer() Spacer()
@ -63,3 +92,35 @@ struct ComposerCreateActionList_Previews: PreviewProvider {
stateRenderer.screenGroup() stateRenderer.screenGroup()
} }
} }
struct ComposerToggleActionStyle: ToggleStyle {
@Environment(\.theme) private var theme
func makeBody(configuration: Configuration) -> some View {
HStack {
Rectangle()
.foregroundColor(.clear)
.frame(width: 50, height: 30, alignment: .center)
.overlay(
Rectangle()
.foregroundColor(configuration.isOn
? theme.colors.accent.opacity(0.5)
: theme.colors.primaryContent.opacity(0.25))
.cornerRadius(7)
.padding(.all, 8)
)
.overlay(
Circle()
.foregroundColor(configuration.isOn
? theme.colors.accent
: theme.colors.background)
.padding(.all, 3)
.offset(x: configuration.isOn ? 11 : -11, y: 0)
.shadow(radius: configuration.isOn ? 0.0 : 2.0)
.animation(Animation.linear(duration: 0.1))
).cornerRadius(20)
.onTapGesture { configuration.isOn.toggle() }
}
}
}
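The new formatting row drives everything through a binding plus onChange: toggling writes textFormattingEnabled and then emits a .toggleTextFormatting view action. A self-contained sketch of that wiring (stand-in view with a local @State instead of the view model context; the real row additionally applies the ComposerToggleActionStyle defined above):

import SwiftUI

struct FormattingToggleRow: View {
    @State private var textFormattingEnabled = true
    var onToggle: (Bool) -> Void = { _ in }

    var body: some View {
        Toggle("Text formatting", isOn: $textFormattingEnabled)
            .onChange(of: textFormattingEnabled) { isOn in
                onToggle(isOn)   // mirrors viewModel.send(viewAction: .toggleTextFormatting(isOn))
            }
    }
}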


@ -35,6 +35,8 @@ class ComposerCreateActionListViewModel: ComposerCreateActionListViewModelType,
switch viewAction { switch viewAction {
case .selectAction(let action): case .selectAction(let action):
callback?(.done(action)) callback?(.done(action))
case .toggleTextFormatting(let enabled):
callback?(.toggleTextFormatting(enabled))
} }
} }
} }


@ -19,6 +19,7 @@ import Foundation
struct ComposerViewState: BindableState { struct ComposerViewState: BindableState {
var eventSenderDisplayName: String? var eventSenderDisplayName: String?
var sendMode: ComposerSendMode = .send var sendMode: ComposerSendMode = .send
var textFormattingEnabled = true
var placeholder: String? var placeholder: String?
var bindings: ComposerBindings var bindings: ComposerBindings
@ -26,7 +27,7 @@ struct ComposerViewState: BindableState {
extension ComposerViewState { extension ComposerViewState {
var shouldDisplayContext: Bool { var shouldDisplayContext: Bool {
return sendMode == .edit || sendMode == .reply sendMode == .edit || sendMode == .reply
} }
var contextDescription: String? { var contextDescription: String? {


@ -83,8 +83,65 @@ struct Composer: View {
var body: some View { var body: some View {
VStack(spacing: 8) { VStack(spacing: 8) {
if viewModel.viewState.textFormattingEnabled {
composerContainer
}
HStack(alignment: .bottom, spacing: 0) {
Button {
showSendMediaActions()
} label: {
Image(Asset.Images.startComposeModule.name)
.resizable()
.foregroundColor(theme.colors.tertiaryContent)
.frame(width: 14, height: 14)
}
.frame(width: 36, height: 36)
.background(Circle().fill(theme.colors.system))
.padding(.trailing, 8)
.accessibilityLabel(VectorL10n.create)
if viewModel.viewState.textFormattingEnabled {
FormattingToolbar(formatItems: formatItems) { type in
wysiwygViewModel.apply(type.action)
}
.frame(height: 44)
Spacer()
} else {
composerContainer
}
Button {
if wysiwygViewModel.plainTextMode {
sendMessageAction(wysiwygViewModel.plainTextModeContent)
} else {
sendMessageAction(wysiwygViewModel.content)
}
wysiwygViewModel.clearContent()
} label: {
if viewModel.viewState.sendMode == .edit {
Image(Asset.Images.saveIcon.name)
} else {
Image(Asset.Images.sendIcon.name)
}
}
.frame(width: 36, height: 36)
.padding(.leading, 8)
.isHidden(!isActionButtonShowing)
.accessibilityIdentifier(actionButtonAccessibilityIdentifier)
.accessibilityLabel(VectorL10n.send)
.onChange(of: wysiwygViewModel.isContentEmpty) { isEmpty in
viewModel.send(viewAction: .contentDidChange(isEmpty: isEmpty))
withAnimation(.easeInOut(duration: 0.15)) {
isActionButtonShowing = !isEmpty
}
}
}
.padding(.horizontal, 12)
.padding(.bottom, 4)
}
}
private var composerContainer: some View {
let rect = RoundedRectangle(cornerRadius: cornerRadius) let rect = RoundedRectangle(cornerRadius: cornerRadius)
VStack(spacing: 12) { return VStack(spacing: 12) {
if viewModel.viewState.shouldDisplayContext { if viewModel.viewState.shouldDisplayContext {
HStack { HStack {
if let imageName = viewModel.viewState.contextImageName { if let imageName = viewModel.viewState.contextImageName {
@ -118,8 +175,11 @@ struct Composer: View {
.placeholder(viewModel.viewState.placeholder, color: theme.colors.tertiaryContent) .placeholder(viewModel.viewState.placeholder, color: theme.colors.tertiaryContent)
.frame(height: wysiwygViewModel.idealHeight) .frame(height: wysiwygViewModel.idealHeight)
.onAppear { .onAppear {
if wysiwygViewModel.isContentEmpty {
wysiwygViewModel.setup() wysiwygViewModel.setup()
} }
}
if viewModel.viewState.textFormattingEnabled {
Button { Button {
wysiwygViewModel.maximised.toggle() wysiwygViewModel.maximised.toggle()
} label: { } label: {
@ -132,6 +192,7 @@ struct Composer: View {
.padding(.leading, 12) .padding(.leading, 12)
.padding(.trailing, 4) .padding(.trailing, 4)
} }
}
.padding(.horizontal, horizontalPadding) .padding(.horizontal, horizontalPadding)
.padding(.top, topPadding) .padding(.top, topPadding)
.padding(.bottom, verticalPadding) .padding(.bottom, verticalPadding)
@ -146,49 +207,6 @@ struct Composer: View {
viewModel.focused = true viewModel.focused = true
} }
} }
HStack(spacing: 0) {
Button {
showSendMediaActions()
} label: {
Image(Asset.Images.startComposeModule.name)
.resizable()
.foregroundColor(theme.colors.tertiaryContent)
.frame(width: 14, height: 14)
}
.frame(width: 36, height: 36)
.background(Circle().fill(theme.colors.system))
.padding(.trailing, 8)
.accessibilityLabel(VectorL10n.create)
FormattingToolbar(formatItems: formatItems) { type in
wysiwygViewModel.apply(type.action)
}
.frame(height: 44)
Spacer()
Button {
sendMessageAction(wysiwygViewModel.content)
wysiwygViewModel.clearContent()
} label: {
if viewModel.viewState.sendMode == .edit {
Image(Asset.Images.saveIcon.name)
} else {
Image(Asset.Images.sendIcon.name)
}
}
.frame(width: 36, height: 36)
.padding(.leading, 8)
.isHidden(!isActionButtonShowing)
.accessibilityIdentifier(actionButtonAccessibilityIdentifier)
.accessibilityLabel(VectorL10n.send)
.onChange(of: wysiwygViewModel.isContentEmpty) { isEmpty in
viewModel.send(viewAction: .contentDidChange(isEmpty: isEmpty))
withAnimation(.easeInOut(duration: 0.15)) {
isActionButtonShowing = !isEmpty
}
}
}
.padding(.horizontal, 12)
.padding(.bottom, 4)
}
} }
} }


@ -36,6 +36,15 @@ final class ComposerViewModel: ComposerViewModelType, ComposerViewModelProtocol
} }
} }
var textFormattingEnabled: Bool {
get {
state.textFormattingEnabled
}
set {
state.textFormattingEnabled = newValue
}
}
var eventSenderDisplayName: String? { var eventSenderDisplayName: String? {
get { get {
state.eventSenderDisplayName state.eventSenderDisplayName


@ -20,6 +20,7 @@ protocol ComposerViewModelProtocol {
var context: ComposerViewModelType.Context { get } var context: ComposerViewModelType.Context { get }
var callback: ((ComposerViewModelResult) -> Void)? { get set } var callback: ((ComposerViewModelResult) -> Void)? { get set }
var sendMode: ComposerSendMode { get set } var sendMode: ComposerSendMode { get set }
var textFormattingEnabled: Bool { get set }
var eventSenderDisplayName: String? { get set } var eventSenderDisplayName: String? { get set }
var placeholder: String? { get set } var placeholder: String? { get set }


@ -48,7 +48,7 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
let voiceBroadcastAggregator = try VoiceBroadcastAggregator(session: parameters.session, room: parameters.room, voiceBroadcastStartEventId: parameters.voiceBroadcastStartEvent.eventId, voiceBroadcastState: parameters.voiceBroadcastState) let voiceBroadcastAggregator = try VoiceBroadcastAggregator(session: parameters.session, room: parameters.room, voiceBroadcastStartEventId: parameters.voiceBroadcastStartEvent.eventId, voiceBroadcastState: parameters.voiceBroadcastState)
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: parameters.senderDisplayName) let details = VoiceBroadcastPlaybackDetails(senderDisplayName: parameters.senderDisplayName, avatarData: parameters.room.avatarData)
viewModel = VoiceBroadcastPlaybackViewModel(details: details, viewModel = VoiceBroadcastPlaybackViewModel(details: details,
mediaServiceProvider: VoiceMessageMediaServiceProvider.sharedProvider, mediaServiceProvider: VoiceMessageMediaServiceProvider.sharedProvider,
cacheManager: VoiceMessageAttachmentCacheManager.sharedManager, cacheManager: VoiceMessageAttachmentCacheManager.sharedManager,
@ -61,7 +61,9 @@ final class VoiceBroadcastPlaybackCoordinator: Coordinator, Presentable {
func start() { } func start() { }
func toPresentable() -> UIViewController { func toPresentable() -> UIViewController {
VectorHostingController(rootView: VoiceBroadcastPlaybackView(viewModel: viewModel.context)) let view = VoiceBroadcastPlaybackView(viewModel: viewModel.context)
.addDependency(AvatarService.instantiate(mediaManager: parameters.session.mediaManager))
return VectorHostingController(rootView: view)
} }
func canEndVoiceBroadcast() -> Bool { func canEndVoiceBroadcast() -> Bool {


@ -26,7 +26,7 @@ class VoiceBroadcastPlaybackProvider {
/// Create or retrieve the voiceBroadcast timeline coordinator for this event and return /// Create or retrieve the voiceBroadcast timeline coordinator for this event and return
/// a view to be displayed in the timeline /// a view to be displayed in the timeline
func buildVoiceBroadcastPlaybackVCForEvent(_ event: MXEvent, senderDisplayName: String?) -> UIViewController? { func buildVoiceBroadcastPlaybackVCForEvent(_ event: MXEvent, senderDisplayName: String?, voiceBroadcastState: String) -> UIViewController? {
guard let session = session, let room = session.room(withRoomId: event.roomId) else { guard let session = session, let room = session.room(withRoomId: event.roomId) else {
return nil return nil
} }
@ -35,26 +35,10 @@ class VoiceBroadcastPlaybackProvider {
return coordinator.toPresentable() return coordinator.toPresentable()
} }
let dispatchGroup = DispatchGroup()
dispatchGroup.enter()
var voiceBroadcastState = VoiceBroadcastInfo.State.stopped
room.state { roomState in
if let stateEvent = roomState?.stateEvents(with: .custom(VoiceBroadcastSettings.voiceBroadcastInfoContentKeyType))?.last,
stateEvent.stateKey == event.stateKey,
let voiceBroadcastInfo = VoiceBroadcastInfo(fromJSON: stateEvent.content),
(stateEvent.eventId == event.eventId || voiceBroadcastInfo.eventId == event.eventId),
let state = VoiceBroadcastInfo.State(rawValue: voiceBroadcastInfo.state) {
voiceBroadcastState = state
}
dispatchGroup.leave()
}
let parameters = VoiceBroadcastPlaybackCoordinatorParameters(session: session, let parameters = VoiceBroadcastPlaybackCoordinatorParameters(session: session,
room: room, room: room,
voiceBroadcastStartEvent: event, voiceBroadcastStartEvent: event,
voiceBroadcastState: voiceBroadcastState, voiceBroadcastState: VoiceBroadcastInfo.State(rawValue: voiceBroadcastState) ?? VoiceBroadcastInfo.State.stopped,
senderDisplayName: senderDisplayName) senderDisplayName: senderDisplayName)
guard let coordinator = try? VoiceBroadcastPlaybackCoordinator(parameters: parameters) else { guard let coordinator = try? VoiceBroadcastPlaybackCoordinator(parameters: parameters) else {
return nil return nil


@ -45,32 +45,48 @@ struct VoiceBroadcastPlaybackView: View {
var body: some View { var body: some View {
let details = viewModel.viewState.details let details = viewModel.viewState.details
VStack(alignment: .center, spacing: 16.0) { VStack(alignment: .center) {
HStack { HStack (alignment: .top) {
Text(details.senderDisplayName ?? "") AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .xSmall)
//Text(VectorL10n.voiceBroadcastInTimelineTitle)
VStack(alignment: .leading, spacing: 0) {
Text(details.avatarData.displayName ?? details.avatarData.matrixItemId)
.font(theme.fonts.bodySB) .font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent) .foregroundColor(theme.colors.primaryContent)
Label {
Text(details.senderDisplayName ?? details.avatarData.matrixItemId)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileMic.image)
}
Label {
Text(VectorL10n.voiceBroadcastTile)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileLive.image)
}
}.frame(maxWidth: .infinity, alignment: .leading)
if viewModel.viewState.broadcastState == .live { if viewModel.viewState.broadcastState == .live {
Button { viewModel.send(viewAction: .playLive) } label: Button { viewModel.send(viewAction: .playLive) } label:
{ {
HStack { Label {
Image(uiImage: Asset.Images.voiceBroadcastLive.image) Text(VectorL10n.voiceBroadcastLive)
.renderingMode(.original) .font(theme.fonts.caption1SB)
Text("Live")
.font(theme.fonts.bodySB)
.foregroundColor(Color.white) .foregroundColor(Color.white)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
} }
} }
.padding(5.0) .padding(.horizontal, 5)
.background(RoundedRectangle(cornerRadius: 4, style: .continuous) .background(RoundedRectangle(cornerRadius: 4, style: .continuous).fill(backgroundColor))
.fill(backgroundColor))
.accessibilityIdentifier("liveButton") .accessibilityIdentifier("liveButton")
} }
} }
.frame(maxWidth: .infinity, alignment: .leading)
if viewModel.viewState.playbackState == .error { if viewModel.viewState.playbackState == .error {
VoiceBroadcastPlaybackErrorView() VoiceBroadcastPlaybackErrorView()
@ -101,13 +117,9 @@ struct VoiceBroadcastPlaybackView: View {
} }
.activityIndicator(show: viewModel.viewState.playbackState == .buffering) .activityIndicator(show: viewModel.viewState.playbackState == .buffering)
} }
} }
.padding([.horizontal, .top], 2.0) .padding([.horizontal, .top], 2.0)
.padding([.bottom]) .padding([.bottom])
.alert(item: $viewModel.alertInfo) { info in
info.alert
}
} }
} }


@ -34,6 +34,7 @@ enum VoiceBroadcastPlaybackState {
struct VoiceBroadcastPlaybackDetails { struct VoiceBroadcastPlaybackDetails {
let senderDisplayName: String? let senderDisplayName: String?
let avatarData: AvatarInputProtocol
} }
enum VoiceBroadcastState { enum VoiceBroadcastState {
@ -51,12 +52,5 @@ struct VoiceBroadcastPlaybackViewState: BindableState {
} }
struct VoiceBroadcastPlaybackViewStateBindings { struct VoiceBroadcastPlaybackViewStateBindings {
// TODO: Neeeded?
var alertInfo: AlertInfo<VoiceBroadcastPlaybackAlertType>?
}
enum VoiceBroadcastPlaybackAlertType {
// TODO: What is it?
case failedClosingVoiceBroadcast
} }


@ -42,7 +42,7 @@ enum MockVoiceBroadcastPlaybackScreenState: MockScreenState, CaseIterable {
/// Generate the view struct for the screen state. /// Generate the view struct for the screen state.
var screenView: ([Any], AnyView) { var screenView: ([Any], AnyView) {
let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice") let details = VoiceBroadcastPlaybackDetails(senderDisplayName: "Alice", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .live, playbackState: .stopped, bindings: VoiceBroadcastPlaybackViewStateBindings())) let viewModel = MockVoiceBroadcastPlaybackViewModel(initialViewState: VoiceBroadcastPlaybackViewState(details: details, broadcastState: .live, playbackState: .stopped, bindings: VoiceBroadcastPlaybackViewStateBindings()))
return ( return (


@ -45,7 +45,7 @@ final class VoiceBroadcastRecorderCoordinator: Coordinator, Presentable {
voiceBroadcastRecorderService = VoiceBroadcastRecorderService(session: parameters.session, roomId: parameters.room.matrixItemId) voiceBroadcastRecorderService = VoiceBroadcastRecorderService(session: parameters.session, roomId: parameters.room.matrixItemId)
let details = VoiceBroadcastRecorderDetails(senderDisplayName: parameters.senderDisplayName) let details = VoiceBroadcastRecorderDetails(senderDisplayName: parameters.senderDisplayName, avatarData: parameters.room.avatarData)
let viewModel = VoiceBroadcastRecorderViewModel(details: details, let viewModel = VoiceBroadcastRecorderViewModel(details: details,
recorderService: voiceBroadcastRecorderService) recorderService: voiceBroadcastRecorderService)
voiceBroadcastRecorderViewModel = viewModel voiceBroadcastRecorderViewModel = viewModel
@ -56,7 +56,9 @@ final class VoiceBroadcastRecorderCoordinator: Coordinator, Presentable {
func start() { } func start() { }
func toPresentable() -> UIViewController { func toPresentable() -> UIViewController {
VectorHostingController(rootView: VoiceBroadcastRecorderView(viewModel: voiceBroadcastRecorderViewModel.context)) let view = VoiceBroadcastRecorderView(viewModel: voiceBroadcastRecorderViewModel.context)
.addDependency(AvatarService.instantiate(mediaManager: parameters.session.mediaManager))
return VectorHostingController(rootView: view)
} }
func pauseRecording() { func pauseRecording() {


@ -33,7 +33,7 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
private var chunkFile: AVAudioFile! = nil private var chunkFile: AVAudioFile! = nil
private var chunkFrames: AVAudioFrameCount = 0 private var chunkFrames: AVAudioFrameCount = 0
private var chunkFileNumber: Int = 1 private var chunkFileNumber: Int = 0
// MARK: Public // MARK: Public
@ -63,14 +63,16 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
} }
try? audioEngine.start() try? audioEngine.start()
// Disable the sleep mode during the recording until we are able to handle it
UIApplication.shared.isIdleTimerDisabled = true
} }
func stopRecordingVoiceBroadcast() { func stopRecordingVoiceBroadcast() {
MXLog.debug("[VoiceBroadcastRecorderService] Stop recording voice broadcast") MXLog.debug("[VoiceBroadcastRecorderService] Stop recording voice broadcast")
audioEngine.stop() audioEngine.stop()
audioEngine.inputNode.removeTap(onBus: audioNodeBus) audioEngine.inputNode.removeTap(onBus: audioNodeBus)
UIApplication.shared.isIdleTimerDisabled = false
resetValues()
voiceBroadcastService?.stopVoiceBroadcast(success: { [weak self] _ in voiceBroadcastService?.stopVoiceBroadcast(success: { [weak self] _ in
MXLog.debug("[VoiceBroadcastRecorderService] Stopped") MXLog.debug("[VoiceBroadcastRecorderService] Stopped")
@ -82,25 +84,33 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
// Send current chunk // Send current chunk
if self.chunkFile != nil { if self.chunkFile != nil {
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber) self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber) {
self.tearDownVoiceBroadcastService()
}
} else {
self.tearDownVoiceBroadcastService()
} }
self.session.tearDownVoiceBroadcastService()
}, failure: { error in }, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to stop voice broadcast", context: error) MXLog.error("[VoiceBroadcastRecorderService] Failed to stop voice broadcast", context: error)
// Discard the service on VoiceBroadcastService error. We keep the service in case of other error type
if error as? VoiceBroadcastServiceError != nil {
self.tearDownVoiceBroadcastService()
}
}) })
} }
func pauseRecordingVoiceBroadcast() { func pauseRecordingVoiceBroadcast() {
audioEngine.pause() audioEngine.pause()
UIApplication.shared.isIdleTimerDisabled = false
voiceBroadcastService?.pauseVoiceBroadcast(success: { [weak self] _ in voiceBroadcastService?.pauseVoiceBroadcast(success: { [weak self] _ in
guard let self = self else { return } guard let self = self else { return }
// Send current chunk // Send current chunk
if self.chunkFile != nil {
self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber) self.sendChunkFile(at: self.chunkFile.url, sequence: self.chunkFileNumber)
self.chunkFile = nil self.chunkFile = nil
}
}, failure: { error in }, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to pause voice broadcast", context: error) MXLog.error("[VoiceBroadcastRecorderService] Failed to pause voice broadcast", context: error)
}) })
@ -113,7 +123,8 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
guard let self = self else { return } guard let self = self else { return }
// Update state // Update state
self.serviceDelegate?.voiceBroadcastRecorderService(self, didUpdateState: .started) self.serviceDelegate?.voiceBroadcastRecorderService(self, didUpdateState: .resumed)
UIApplication.shared.isIdleTimerDisabled = true
}, failure: { error in }, failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to resume voice broadcast", context: error) MXLog.error("[VoiceBroadcastRecorderService] Failed to resume voice broadcast", context: error)
}) })
@ -123,7 +134,13 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
/// Reset chunk values. /// Reset chunk values.
private func resetValues() { private func resetValues() {
chunkFrames = 0 chunkFrames = 0
chunkFileNumber = 1 chunkFileNumber = 0
}
/// Release the service
private func tearDownVoiceBroadcastService() {
resetValues()
session.tearDownVoiceBroadcastService()
} }
/// Write audio buffer to chunk file. /// Write audio buffer to chunk file.
@ -150,6 +167,7 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
// FIXME: Manage error // FIXME: Manage error
return return
} }
chunkFileNumber += 1
let temporaryFileName = "VoiceBroadcastChunk-\(roomId)-\(chunkFileNumber)" let temporaryFileName = "VoiceBroadcastChunk-\(roomId)-\(chunkFileNumber)"
let fileUrl = directory let fileUrl = directory
.appendingPathComponent(temporaryFileName) .appendingPathComponent(temporaryFileName)
@ -165,18 +183,20 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
chunkFile = try? AVAudioFile(forWriting: fileUrl, settings: settings) chunkFile = try? AVAudioFile(forWriting: fileUrl, settings: settings)
if chunkFile != nil { if chunkFile != nil {
chunkFileNumber += 1
chunkFrames = 0 chunkFrames = 0
} else { } else {
chunkFileNumber -= 1
stopRecordingVoiceBroadcast() stopRecordingVoiceBroadcast()
// FIXME: Manage error ? // FIXME: Manage error ?
} }
} }
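The chunk counter now starts at 0 and is incremented before each new chunk file is created (and rolled back if creation fails), so the first uploaded chunk carries sequence 1 and the sequence stays contiguous. A self-contained sketch of that numbering (directory and room id are placeholders):

import Foundation

struct ChunkFileNamer {
    let roomId: String
    private(set) var chunkFileNumber = 0

    mutating func nextChunkURL(in directory: URL) -> URL {
        chunkFileNumber += 1   // bump before the file is created
        return directory.appendingPathComponent("VoiceBroadcastChunk-\(roomId)-\(chunkFileNumber)")
    }

    mutating func creationFailed() {
        chunkFileNumber -= 1   // roll back so no sequence number is skipped
    }
}

var namer = ChunkFileNamer(roomId: "!room:example.org")
let firstChunk = namer.nextChunkURL(in: FileManager.default.temporaryDirectory)
// firstChunk ends in VoiceBroadcastChunk-!room:example.org-1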
/// Send chunk file to the server. /// Send chunk file to the server.
private func sendChunkFile(at url: URL, sequence: Int) { private func sendChunkFile(at url: URL, sequence: Int, completion: (() -> Void)? = nil) {
guard let voiceBroadcastService = voiceBroadcastService else { guard voiceBroadcastService != nil else {
// FIXME: Manage error // FIXME: Manage error
MXLog.debug("[VoiceBroadcastRecorderService] sendChunkFile: service is not available")
completion?()
return return
} }
@ -200,21 +220,29 @@ class VoiceBroadcastRecorderService: VoiceBroadcastRecorderServiceProtocol {
} }
convertAACToM4A(at: url) { [weak self] convertedUrl in convertAACToM4A(at: url) { [weak self] convertedUrl in
guard let self = self else { return } guard let self = self else {
completion?()
return
}
// Delete the source file.
self.deleteRecording(at: url)
if let convertedUrl = convertedUrl { if let convertedUrl = convertedUrl {
dispatchGroup.notify(queue: .main) { dispatchGroup.notify(queue: .main) {
self.voiceBroadcastService?.sendChunkOfVoiceBroadcast(audioFileLocalURL: convertedUrl, self.voiceBroadcastService?.sendChunkOfVoiceBroadcast(audioFileLocalURL: convertedUrl,
mimeType: "audio/mp4", mimeType: "audio/mp4",
duration: UInt(duration * 1000), duration: UInt(duration * 1000),
samples: nil,
sequence: UInt(sequence)) { eventId in sequence: UInt(sequence)) { eventId in
MXLog.debug("[VoiceBroadcastRecorderService] Send voice broadcast chunk with success.") MXLog.debug("[VoiceBroadcastRecorderService] Send voice broadcast chunk with success.")
if eventId != nil { self.deleteRecording(at: convertedUrl)
self.deleteRecording(at: url) completion?()
}
} failure: { error in } failure: { error in
MXLog.error("[VoiceBroadcastRecorderService] Failed to send voice broadcast chunk.", context: error) MXLog.error("[VoiceBroadcastRecorderService] Failed to send voice broadcast chunk.", context: error)
// Do not delete the file to be sent if request failed, the retry flow will need it
// There's no manual mechanism to clean it up afterwards but the tmp folder
// they live in will eventually be deleted by the system
completion?()
} }
} }
} }


@ -23,6 +23,13 @@ struct VoiceBroadcastRecorderView: View {
@Environment(\.theme) private var theme: ThemeSwiftUI @Environment(\.theme) private var theme: ThemeSwiftUI
private var backgroundColor: Color {
if viewModel.viewState.recordingState != .paused {
return theme.colors.alert
}
return theme.colors.quarterlyContent
}
// MARK: Public // MARK: Public
@ObservedObject var viewModel: VoiceBroadcastRecorderViewModel.Context @ObservedObject var viewModel: VoiceBroadcastRecorderViewModel.Context
@ -30,10 +37,35 @@ struct VoiceBroadcastRecorderView: View {
var body: some View { var body: some View {
let details = viewModel.viewState.details let details = viewModel.viewState.details
VStack(alignment: .leading, spacing: 16.0) { VStack(alignment: .center) {
Text(details.senderDisplayName ?? "")
HStack(alignment: .top) {
AvatarImage(avatarData: viewModel.viewState.details.avatarData, size: .xSmall)
VStack(alignment: .leading, spacing: 0) {
Text(details.avatarData.displayName ?? details.avatarData.matrixItemId)
.font(theme.fonts.bodySB) .font(theme.fonts.bodySB)
.foregroundColor(theme.colors.primaryContent) .foregroundColor(theme.colors.primaryContent)
Label {
Text(VectorL10n.voiceBroadcastTile)
.foregroundColor(theme.colors.secondaryContent)
.font(theme.fonts.caption1)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastTileLive.image)
}
}.frame(maxWidth: .infinity, alignment: .leading)
Label {
Text(VectorL10n.voiceBroadcastLive)
.font(theme.fonts.caption1SB)
.foregroundColor(Color.white)
} icon: {
Image(uiImage: Asset.Images.voiceBroadcastLive.image)
}
.padding(.horizontal, 5)
.background(RoundedRectangle(cornerRadius: 4, style: .continuous).fill(backgroundColor))
.accessibilityIdentifier("liveButton")
}
HStack(alignment: .top, spacing: 16.0) { HStack(alignment: .top, spacing: 16.0) {
Button { Button {


@ -32,6 +32,7 @@ enum VoiceBroadcastRecorderState {
struct VoiceBroadcastRecorderDetails { struct VoiceBroadcastRecorderDetails {
let senderDisplayName: String? let senderDisplayName: String?
let avatarData: AvatarInputProtocol
} }
struct VoiceBroadcastRecorderViewState: BindableState { struct VoiceBroadcastRecorderViewState: BindableState {


@ -31,7 +31,7 @@ enum MockVoiceBroadcastRecorderScreenState: MockScreenState, CaseIterable {
} }
var screenView: ([Any], AnyView) { var screenView: ([Any], AnyView) {
let details = VoiceBroadcastRecorderDetails(senderDisplayName: "") let details = VoiceBroadcastRecorderDetails(senderDisplayName: "", avatarData: AvatarInput(mxContentUri: "", matrixItemId: "!fakeroomid:matrix.org", displayName: "The name of the room"))
let viewModel = MockVoiceBroadcastRecorderViewModel(initialViewState: VoiceBroadcastRecorderViewState(details: details, recordingState: .started, bindings: VoiceBroadcastRecorderViewStateBindings())) let viewModel = MockVoiceBroadcastRecorderViewModel(initialViewState: VoiceBroadcastRecorderViewState(details: details, recordingState: .started, bindings: VoiceBroadcastRecorderViewStateBindings()))
return ( return (

changelog.d/6980.change Normal file

@ -0,0 +1 @@
Labs: Rich text editor - Add support for plain text mode