
Commit 98dc32f

Author: Isaac (committed)
Merge commit '0077a070f2e6a9cb831b6e017260edd0f29d91e8'
2 parents: 7b72c1a + 0077a07


9 files changed (+186, -45 lines)


submodules/ChatInterfaceState/Sources/ChatInterfaceState.swift

Lines changed: 17 additions & 8 deletions
@@ -282,15 +282,15 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {
     public struct Audio: Codable, Equatable {
         public let resource: LocalFileMediaResource
         public let fileSize: Int32
-        public let duration: Int32
+        public let duration: Double
         public let waveform: AudioWaveform
         public let trimRange: Range<Double>?
         public let resumeData: Data?

         public init(
             resource: LocalFileMediaResource,
             fileSize: Int32,
-            duration: Int32,
+            duration: Double,
             waveform: AudioWaveform,
             trimRange: Range<Double>?,
             resumeData: Data?
@@ -310,7 +310,12 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {
             self.resource = LocalFileMediaResource(decoder: PostboxDecoder(buffer: MemoryBuffer(data: resourceData.data)))

             self.fileSize = try container.decode(Int32.self, forKey: "s")
-            self.duration = try container.decode(Int32.self, forKey: "d")
+
+            if let doubleValue = try container.decodeIfPresent(Double.self, forKey: "dd") {
+                self.duration = doubleValue
+            } else {
+                self.duration = Double(try container.decode(Int32.self, forKey: "d"))
+            }

             let waveformData = try container.decode(Data.self, forKey: "wd")
             let waveformPeak = try container.decode(Int32.self, forKey: "wp")
@@ -330,7 +335,7 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {

             try container.encode(PostboxEncoder().encodeObjectToRawData(self.resource), forKey: "r")
             try container.encode(self.fileSize, forKey: "s")
-            try container.encode(self.duration, forKey: "d")
+            try container.encode(self.duration, forKey: "dd")
             try container.encode(self.waveform.samples, forKey: "wd")
             try container.encode(self.waveform.peak, forKey: "wp")

@@ -368,13 +373,13 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {
     }

     public struct Video: Codable, Equatable {
-        public let duration: Int32
+        public let duration: Double
         public let frames: [UIImage]
         public let framesUpdateTimestamp: Double
         public let trimRange: Range<Double>?

         public init(
-            duration: Int32,
+            duration: Double,
             frames: [UIImage],
             framesUpdateTimestamp: Double,
             trimRange: Range<Double>?
@@ -388,7 +393,11 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {
         public init(from decoder: Decoder) throws {
             let container = try decoder.container(keyedBy: StringCodingKey.self)

-            self.duration = try container.decode(Int32.self, forKey: "d")
+            if let doubleValue = try container.decodeIfPresent(Double.self, forKey: "dd") {
+                self.duration = doubleValue
+            } else {
+                self.duration = Double(try container.decode(Int32.self, forKey: "d"))
+            }
             self.frames = []
             self.framesUpdateTimestamp = try container.decode(Double.self, forKey: "fu")
             if let trimLowerBound = try container.decodeIfPresent(Double.self, forKey: "tl"), let trimUpperBound = try container.decodeIfPresent(Double.self, forKey: "tu") {
@@ -401,7 +410,7 @@ public enum ChatInterfaceMediaDraftState: Codable, Equatable {
         public func encode(to encoder: Encoder) throws {
             var container = encoder.container(keyedBy: StringCodingKey.self)

-            try container.encode(self.duration, forKey: "d")
+            try container.encode(self.duration, forKey: "dd")
             try container.encode(self.framesUpdateTimestamp, forKey: "fu")
             if let trimRange = self.trimRange {
                 try container.encode(trimRange.lowerBound, forKey: "tl")
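
The change above is a backward-compatible storage migration: the draft duration is now persisted as a Double under the new key "dd", and decoding falls back to the legacy Int32 key "d" so drafts written by earlier builds still load. A minimal, self-contained sketch of the same fallback pattern, using plain Foundation Codable instead of the Postbox StringCodingKey container (the DraftDuration type here is illustrative only, not part of the commit):

import Foundation

// Sketch only: mirrors the "dd"/"d" fallback from the diff with a plain Codable type.
struct DraftDuration: Codable {
    var duration: Double

    private enum CodingKeys: String, CodingKey {
        case legacyDuration = "d"   // old Int32 seconds
        case duration = "dd"        // new Double seconds
    }

    init(duration: Double) {
        self.duration = duration
    }

    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        if let doubleValue = try container.decodeIfPresent(Double.self, forKey: .duration) {
            self.duration = doubleValue
        } else {
            // Fall back to the integer key written by older clients.
            self.duration = Double(try container.decode(Int32.self, forKey: .legacyDuration))
        }
    }

    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        // New builds write only the Double key.
        try container.encode(self.duration, forKey: .duration)
    }
}

Decoding {"d": 5} yields duration == 5.0, while {"dd": 5.3} yields 5.3, which is the compatibility the diff preserves for both voice and video drafts.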

submodules/LegacyUI/Sources/LegacyController.swift

Lines changed: 13 additions & 1 deletion
@@ -271,13 +271,25 @@ public final class LegacyControllerContext: NSObject, LegacyComponentsContext {
             return
         }

+        var position: TooltipScreen.ArrowPosition = .bottom
+        if let layout = self.controller?.currentlyAppliedLayout, let orientation = layout.metrics.orientation {
+            switch orientation {
+            case .landscapeLeft:
+                position = .left
+            case .landscapeRight:
+                position = .right
+            default:
+                break
+            }
+        }
+
         let controller = TooltipScreen(
             account: context.account,
             sharedContext: context.sharedContext,
             text: .plain(text: text),
             style: .customBlur(UIColor(rgb: 0x18181a), 0.0),
             icon: .image(icon),
-            location: .point(sourceRect, .bottom),
+            location: .point(sourceRect, position),
             displayDuration: .custom(2.0),
             shouldDismissOnTouch: { _, _ in
                 return .dismiss(consume: false)

submodules/MediaPlayer/Sources/MediaPlayerTimeTextNode.swift

Lines changed: 7 additions & 2 deletions
@@ -157,7 +157,7 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {

     private func ensureHasTimer() {
         if self.updateTimer == nil {
-            let timer = SwiftSignalKit.Timer(timeout: 0.5, repeat: true, completion: { [weak self] in
+            let timer = SwiftSignalKit.Timer(timeout: 0.2, repeat: true, completion: { [weak self] in
                 self?.updateTimestamp()
             }, queue: Queue.mainQueue())
             self.updateTimer = timer
@@ -182,7 +182,12 @@ public final class MediaPlayerTimeTextNode: ASDisplayNode {
             duration = trimRange.upperBound - trimRange.lowerBound
         }

-        if self.showDurationIfNotStarted && (timestamp < .ulpOfOne || self.isScrubbing) {
+        var isPlaying = false
+        if case .playing = statusValue.status {
+            isPlaying = true
+        }
+
+        if self.showDurationIfNotStarted && (timestamp < .ulpOfOne || self.isScrubbing) && !isPlaying {
             let timestamp = Int32(duration)
             self.state = MediaPlayerTimeTextNodeState(hours: timestamp / (60 * 60), minutes: timestamp % (60 * 60) / 60, seconds: timestamp % 60)
         } else {

submodules/TelegramCore/Sources/Utils/PeerUtils.swift

Lines changed: 9 additions & 1 deletion
@@ -279,6 +279,14 @@ public extension Peer {
         }
     }

+    var displayForumAsTabs: Bool {
+        if let channel = self as? TelegramChannel, isForum {
+            return channel.flags.contains(.displayForumAsTabs)
+        } else {
+            return false
+        }
+    }
+
     var isForumOrMonoForum: Bool {
         if let channel = self as? TelegramChannel {
             return channel.flags.contains(.isForum) || channel.flags.contains(.isMonoforum)
@@ -460,7 +468,7 @@ public func peerViewMonoforumMainPeer(_ view: PeerView) -> Peer? {
         if let channel = peer as? TelegramChannel, channel.flags.contains(.isMonoforum), let linkedMonoforumId = channel.linkedMonoforumId {
             return view.peers[linkedMonoforumId]
         } else {
-            return peer
+            return nil
         }
     } else {
         return nil
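
displayForumAsTabs follows the same flag-based convention as the neighbouring helpers: it is true only for a TelegramChannel that is a forum and carries the displayForumAsTabs flag. A hypothetical call-site sketch (the TopicLayout enum and chooseTopicLayout function are illustrative, not part of this commit):

// Sketch only: how a caller might branch on the new helper.
enum TopicLayout {
    case tabs, topicList, plainChat
}

func chooseTopicLayout(for peer: Peer) -> TopicLayout {
    if peer.displayForumAsTabs {
        return .tabs          // forum channel with the displayForumAsTabs flag
    } else if peer.isForumOrMonoForum {
        return .topicList     // regular forum or monoforum
    } else {
        return .plainChat
    }
}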

submodules/TelegramNotices/Sources/Notices.swift

Lines changed: 32 additions & 0 deletions
@@ -206,6 +206,7 @@ private enum ApplicationSpecificGlobalNotice: Int32 {
     case multipleStoriesTooltip = 79
     case voiceMessagesPauseSuggestion = 80
     case videoMessagesPauseSuggestion = 81
+    case voiceMessagesResumeTrimWarning = 82

     var key: ValueBoxKey {
         let v = ValueBoxKey(length: 4)
@@ -579,6 +580,10 @@ private struct ApplicationSpecificNoticeKeys {
     static func videoMessagesPauseSuggestion() -> NoticeEntryKey {
         return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.videoMessagesPauseSuggestion.key)
     }
+
+    static func voiceMessagesResumeTrimWarning() -> NoticeEntryKey {
+        return NoticeEntryKey(namespace: noticeNamespace(namespace: globalNamespace), key: ApplicationSpecificGlobalNotice.voiceMessagesResumeTrimWarning.key)
+    }
 }

 public struct ApplicationSpecificNotice {
@@ -2522,4 +2527,31 @@ public struct ApplicationSpecificNotice {
             return Int(previousValue)
         }
     }
+
+    public static func getVoiceMessagesResumeTrimWarning(accountManager: AccountManager<TelegramAccountManagerTypes>) -> Signal<Int32, NoError> {
+        return accountManager.transaction { transaction -> Int32 in
+            if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesResumeTrimWarning())?.get(ApplicationSpecificCounterNotice.self) {
+                return value.value
+            } else {
+                return 0
+            }
+        }
+    }
+
+    public static func incrementVoiceMessagesResumeTrimWarning(accountManager: AccountManager<TelegramAccountManagerTypes>, count: Int = 1) -> Signal<Int, NoError> {
+        return accountManager.transaction { transaction -> Int in
+            var currentValue: Int32 = 0
+            if let value = transaction.getNotice(ApplicationSpecificNoticeKeys.voiceMessagesResumeTrimWarning())?.get(ApplicationSpecificCounterNotice.self) {
+                currentValue = value.value
+            }
+            let previousValue = currentValue
+            currentValue += Int32(count)
+
+            if let entry = CodableEntry(ApplicationSpecificCounterNotice(value: currentValue)) {
+                transaction.setNotice(ApplicationSpecificNoticeKeys.voiceMessagesResumeTrimWarning(), entry)
+            }
+
+            return Int(previousValue)
+        }
+    }
 }
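
These two helpers follow the counter-notice pattern used throughout this file: get returns the stored count (0 when unset) and increment bumps it while returning the previous value, so a caller can show a warning only until the user has accepted it once. A rough usage sketch matching the ChatControllerMediaRecording change further down (proceed and showTrimWarningAlert are hypothetical stand-ins for the caller's own logic):

// Sketch only: gate a one-time warning behind the counter notice.
let _ = (ApplicationSpecificNotice.getVoiceMessagesResumeTrimWarning(accountManager: accountManager)
|> deliverOnMainQueue).start(next: { count in
    if count > 0 {
        // The user has already confirmed once; skip straight to the action.
        proceed()
    } else {
        showTrimWarningAlert(onProceed: {
            proceed()
            // Remember the confirmation so the alert is not shown again.
            let _ = ApplicationSpecificNotice.incrementVoiceMessagesResumeTrimWarning(accountManager: accountManager).start()
        })
    }
})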

submodules/TelegramUI/Sources/Chat/ChatControllerMediaRecording.swift

Lines changed: 63 additions & 23 deletions
@@ -325,7 +325,7 @@ extension ChatControllerImpl {
                         ChatInterfaceMediaDraftState.Audio(
                             resource: resource!,
                             fileSize: Int32(data.compressedData.count),
-                            duration: Int32(data.duration),
+                            duration: data.duration,
                             waveform: audioWaveform,
                             trimRange: data.trimRange,
                             resumeData: data.resumeData
@@ -439,7 +439,7 @@ extension ChatControllerImpl {
                     $0.updatedInterfaceState {
                         $0.withUpdatedMediaDraftState(.video(
                             ChatInterfaceMediaDraftState.Video(
-                                duration: Int32(data.duration),
+                                duration: data.duration,
                                 frames: data.frames,
                                 framesUpdateTimestamp: data.framesUpdateTimestamp,
                                 trimRange: data.trimRange
@@ -494,7 +494,7 @@ extension ChatControllerImpl {
             })
         } else {
             let proceed = {
-                self.withAudioRecorder({ audioRecorder in
+                self.withAudioRecorder(resuming: true, { audioRecorder in
                     audioRecorder.resume()

                     self.updateChatPresentationInterfaceState(animated: true, interactive: true, {
@@ -505,24 +505,34 @@ extension ChatControllerImpl {
                 })
             }

-            //TODO:localize
-            if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let _ = audio.trimRange {
-                self.present(
-                    textAlertController(
-                        context: self.context,
-                        title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Title,
-                        text: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Text,
-                        actions: [
-                            TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}),
-                            TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Proceed, action: {
-                                proceed()
-                            })
-                        ]
-                    ), in: .window(.root)
-                )
-            } else {
-                proceed()
-            }
+            let _ = (ApplicationSpecificNotice.getVoiceMessagesResumeTrimWarning(accountManager: self.context.sharedContext.accountManager)
+            |> deliverOnMainQueue).start(next: { [weak self] count in
+                guard let self else {
+                    return
+                }
+                if count > 0 {
+                    proceed()
+                    return
+                }
+                if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview, let trimRange = audio.trimRange, trimRange.lowerBound > 0.1 || trimRange.upperBound < audio.duration {
+                    self.present(
+                        textAlertController(
+                            context: self.context,
+                            title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Title,
+                            text: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Text,
+                            actions: [
+                                TextAlertAction(type: .genericAction, title: self.presentationData.strings.Common_Cancel, action: {}),
+                                TextAlertAction(type: .defaultAction, title: self.presentationData.strings.Chat_TrimVoiceMessageToResume_Proceed, action: {
+                                    proceed()
+                                    let _ = ApplicationSpecificNotice.incrementVoiceMessagesResumeTrimWarning(accountManager: self.context.sharedContext.accountManager).start()
+                                })
+                            ]
+                        ), in: .window(.root)
+                    )
+                } else {
+                    proceed()
+                }
+            })
         }
     }

@@ -607,13 +617,43 @@ extension ChatControllerImpl {
         self.present(tooltipController, in: .window(.root))
     }

-    private func withAudioRecorder(_ f: (ManagedAudioRecorder) -> Void) {
+    private func withAudioRecorder(resuming: Bool, _ f: (ManagedAudioRecorder) -> Void) {
         if let audioRecorder = self.audioRecorderValue {
             f(audioRecorder)
         } else if let recordedMediaPreview = self.presentationInterfaceState.interfaceState.mediaDraftState, case let .audio(audio) = recordedMediaPreview {
             self.requestAudioRecorder(beginWithTone: false, existingDraft: audio)
             if let audioRecorder = self.audioRecorderValue {
                 f(audioRecorder)
+
+                if !resuming {
+                    self.recorderDataDisposable.set(
+                        (audioRecorder.takenRecordedData()
+                        |> deliverOnMainQueue).startStrict(
+                            next: { [weak self] data in
+                                if let strongSelf = self, let data = data {
+                                    let audioWaveform = audio.waveform
+
+                                    strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: true, {
+                                        $0.updatedInterfaceState {
+                                            $0.withUpdatedMediaDraftState(.audio(
+                                                ChatInterfaceMediaDraftState.Audio(
+                                                    resource: audio.resource,
+                                                    fileSize: Int32(data.compressedData.count),
+                                                    duration: data.duration,
+                                                    waveform: audioWaveform,
+                                                    trimRange: data.trimRange,
+                                                    resumeData: data.resumeData
+                                                )
+                                            ))
+                                        }.updatedInputTextPanelState { panelState in
+                                            return panelState.withUpdatedMediaRecordingState(nil)
+                                        }
+                                    })
+                                    strongSelf.updateDownButtonVisibility()
+                                }
+                        })
+                    )
+                }
             }
         }
     }
@@ -622,7 +662,7 @@ extension ChatControllerImpl {
         if let videoRecorder = self.videoRecorderValue {
             videoRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
         } else {
-            self.withAudioRecorder({ audioRecorder in
+            self.withAudioRecorder(resuming: false, { audioRecorder in
                 audioRecorder.updateTrimRange(start: start, end: end, updatedEnd: updatedEnd, apply: apply)
             })
         }
