Compare commits

...

9 Commits

15 changed files with 910 additions and 472 deletions

View File

@ -8,10 +8,10 @@ enum AppRoute: Hashable {
case feedbackDetail(type: FeedbackView.FeedbackType)
case mediaUpload
case blindBox(mediaType: BlindBoxMediaType, blindBoxId: String? = nil)
case blindOutcome(media: MediaType, time: String? = nil, description: String? = nil, isMember: Bool)
case blindOutcome(media: MediaType, title: String? = nil, description: String? = nil, isMember: Bool, goToFeedback: Bool = false)
case memories
case subscribe
case userInfo
case userInfo(createFirstBlindBox: Bool = false)
case account
case about
case permissionManagement
@ -33,14 +33,26 @@ enum AppRoute: Hashable {
MediaUploadView()
case .blindBox(let mediaType, let blindBoxId):
BlindBoxView(mediaType: mediaType, blindBoxId: blindBoxId)
case .blindOutcome(let media, let time, let description, let isMember):
BlindOutcomeView(media: media, time: time, description: description, isMember: isMember)
case .blindOutcome(let media, let title, let description, let isMember, let goToFeedback):
BlindOutcomeView(
media: media,
title: title,
description: description,
isMember: isMember,
onContinue: {
if goToFeedback {
Router.shared.navigate(to: .feedbackView)
} else {
Router.shared.navigate(to: .blindBox(mediaType: .all))
}
}
)
case .memories:
MemoriesView()
case .subscribe:
SubscribeView()
case .userInfo:
UserInfo()
case .userInfo(let createFirstBlindBox):
UserInfo(createFirstBlindBox: createFirstBlindBox)
case .account:
AccountView()
case .about:

View File

@ -37,6 +37,15 @@ struct BlindBoxActionButton: View {
.background(Color.themePrimary)
.foregroundColor(Color.themeTextMessageMain)
.cornerRadius(32)
case .opening:
Text("Ready")
.font(Typography.font(for: .body))
.fontWeight(.bold)
.frame(maxWidth: .infinity)
.padding()
.background(Color.themePrimary)
.foregroundColor(Color.themeTextMessageMain)
.cornerRadius(32)
default:
Text("Go to Buy")
.font(Typography.font(for: .body))

View File

@ -7,25 +7,78 @@ struct BlindBoxLottieOnceView: UIViewRepresentable {
var animationSpeed: CGFloat = 1.0
let onCompleted: () -> Void
func makeUIView(context: Context) -> LottieAnimationView {
func makeUIView(context: Context) -> UIView {
// Wrap the Lottie animation view in a plain UIView container so SwiftUI layout can size and clip it
let container = UIView()
container.clipsToBounds = true
let animationView = LottieAnimationView()
animationView.translatesAutoresizingMaskIntoConstraints = false
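// Try the bundled Lottie animation first, then fall back to a JSON file in the main bundle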
if let animation = LottieAnimation.named(name) {
animationView.animation = animation
} else if let path = Bundle.main.path(forResource: name, ofType: "json") {
let animation = LottieAnimation.filepath(path)
animationView.animation = animation
}
animationView.loopMode = .playOnce
animationView.animationSpeed = animationSpeed
animationView.contentMode = .scaleAspectFit
animationView.backgroundBehavior = .pauseAndRestore
container.addSubview(animationView)
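// Pin the animation view to all edges of the container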
NSLayoutConstraint.activate([
animationView.leadingAnchor.constraint(equalTo: container.leadingAnchor),
animationView.trailingAnchor.constraint(equalTo: container.trailingAnchor),
animationView.topAnchor.constraint(equalTo: container.topAnchor),
animationView.bottomAnchor.constraint(equalTo: container.bottomAnchor)
])
// Play once and invoke the completion callback when the animation finishes
animationView.play { _ in
onCompleted()
}
return animationView
return container
}
func updateUIView(_ uiView: LottieAnimationView, context: Context) {
func updateUIView(_ uiView: UIView, context: Context) {
// No dynamic updates needed; the animation plays once when created
}
}
#if DEBUG
struct BlindBoxLottieOnceView_Previews: PreviewProvider {
static var previews: some View {
Group {
VStack(spacing: 16) {
Text("Opening • x1.0")
.font(.caption)
.foregroundColor(.gray)
BlindBoxLottieOnceView(name: "opening", animationSpeed: 1.0) {
print("Opening completed")
}
.frame(width: 300, height: 300)
.background(Color.themeTextWhiteSecondary)
}
.previewDisplayName("Opening • 1.0x")
VStack(spacing: 16) {
Text("Opening • x1.5")
.font(.caption)
.foregroundColor(.gray)
BlindBoxLottieOnceView(name: "opening", animationSpeed: 1.5) {
print("Opening completed (1.5x)")
}
.frame(width: 300, height: 300)
.background(Color.themeTextWhiteSecondary)
}
.previewDisplayName("Opening • 1.5x")
}
.padding()
.background(Color.themeTextWhiteSecondary)
}
}
#endif

View File

@ -22,6 +22,8 @@ struct BlindBoxView: View {
@State private var showLogin = false
// Countdown text is supplied by the view model (countdownText)
@State private var animationPhase: BlindBoxAnimationPhase = .none
// Guards against duplicate open requests while the opening flow is running
@State private var isOpening: Bool = false
// Login records (SwiftData query)
@Query private var login: [Login]
@ -55,6 +57,8 @@ struct BlindBoxView: View {
viewModel.player?.pause()
viewModel.player?.replaceCurrentItem(with: nil)
viewModel.player = nil
// Reset the opening guard during teardown
isOpening = false
NotificationCenter.default.removeObserver(
self,
@ -153,6 +157,8 @@ struct BlindBoxView: View {
maxWidth: .infinity,
maxHeight: UIScreen.main.bounds.height * 0.65
)
// Keep the blind box above other content while the opening animation plays
.zIndex(animationPhase == .opening ? 1 : 0)
// TODO
@ -161,12 +167,23 @@ struct BlindBoxView: View {
phase: animationPhase,
countdownText: viewModel.countdownText,
onOpen: {
openBlindBoxAndUpdateState(navigateAfterOpen: true)
// Ignore repeated taps while an open request is in flight
guard !isOpening else { return }
isOpening = true
// Navigation is deferred to onOpeningCompleted rather than happening right after the request
openBlindBoxAndUpdateState(navigateAfterOpen: false)
},
onGoToBuy: {
Router.shared.navigate(to: .mediaUpload)
}
)
.disabled(isOpening)
// Hide the action button while the opening animation plays
.opacity(animationPhase == .opening ? 0 : 1)
// Animate the visibility change
.animation(.easeInOut(duration: 0.2), value: animationPhase)
// Also stop the hidden button from receiving taps
.allowsHitTesting(animationPhase != .opening)
.padding(.horizontal)
}
}
@ -259,6 +276,8 @@ struct BlindBoxView: View {
}
} catch {
print("❌ 开启盲盒失败: \(error)")
// Re-enable the open button after a failure
isOpening = false
}
}
}
@ -283,11 +302,14 @@ struct BlindBoxView: View {
Router.shared.navigate(
to: .blindOutcome(
media: .video(url, nil),
time: viewModel.blindGenerate?.name ?? "Your box",
title: viewModel.blindGenerate?.name ?? "Your box",
description: viewModel.blindGenerate?.description ?? "",
isMember: viewModel.isMember
isMember: viewModel.isMember,
goToFeedback: false
)
)
// Clear the opening guard after navigating to the video outcome
isOpening = false
return
}
} else if mediaType == .image {
@ -303,16 +325,21 @@ struct BlindBoxView: View {
Router.shared.navigate(
to: .blindOutcome(
media: .image(image),
time: viewModel.blindGenerate?.name ?? "Your box",
title: viewModel.blindGenerate?.name ?? "Your box",
description: viewModel.blindGenerate?.description ?? "",
isMember: viewModel.isMember
isMember: viewModel.isMember,
goToFeedback: true
)
)
// Clear the opening guard after navigating to the image outcome
isOpening = false
return
}
}
// Media is not ready yet; log the state to aid debugging
print("⚠️ navigateToOutcome: media not ready yet. videoURL=\(viewModel.videoURL), image=\(String(describing: viewModel.displayImage))")
// Clear the opening guard so the user can try again
isOpening = false
}
}
}

View File

@ -1,24 +1,24 @@
import SwiftUI
import AVKit
import os.log
struct BlindOutcomeView: View {
let media: MediaType
let time: String?
let title: String?
let description: String?
let isMember: Bool
// Removed presentationMode; use Router.shared.pop() for back navigation
@State private var isFullscreen = false
@State private var isPlaying = false
@State private var showControls = true
@State private var showIPListModal = false
@State private var player: AVPlayer?
let onContinue: () -> Void
let showJoinModal: Bool
init(media: MediaType, time: String? = nil, description: String? = nil, isMember: Bool = false) {
// Removed presentationMode; use Router.shared.pop() for back navigation
@State private var showIPListModal = false
init(media: MediaType, title: String? = nil, description: String? = nil, isMember: Bool = false, onContinue: @escaping () -> Void, showJoinModal: Bool = false) {
self.media = media
self.time = time
self.title = title
self.description = description
self.isMember = isMember
self.onContinue = onContinue
self.showJoinModal = showJoinModal
}
var body: some View {
@ -26,40 +26,17 @@ struct BlindOutcomeView: View {
Color.themeTextWhiteSecondary.ignoresSafeArea()
VStack(spacing: 0) {
// Custom top navigation bar
HStack {
Button(action: {
Router.shared.pop()
}) {
HStack(spacing: 4) {
Image(systemName: "chevron.left")
.font(.headline)
}
.foregroundColor(Color.themeTextMessageMain)
}
.padding(.leading, 16)
//
// NaviHeader(
// title: "Blind Box",
// onBackTap: { Router.shared.pop() },
// showBackButton: true,
// titleStyle: .title,
// backgroundColor: Color.themeTextWhiteSecondary
// )
// .zIndex(1)
Spacer()
Text("Blind Box")
.font(.headline)
.foregroundColor(Color.themeTextMessageMain)
Spacer()
HStack(spacing: 4) {
Image(systemName: "chevron.left")
.opacity(0)
}
.padding(.trailing, 16)
}
.padding(.vertical, 12)
.background(Color.themeTextWhiteSecondary)
.zIndex(1)
Spacer()
.frame(height: 30)
.frame(height: Theme.Spacing.lg)
// Media content
GeometryReader { geometry in
VStack(spacing: 16) {
@ -77,47 +54,35 @@ struct BlindOutcomeView: View {
.frame(maxWidth: .infinity, maxHeight: .infinity)
.cornerRadius(10)
.padding(4)
.onTapGesture {
withAnimation {
isFullscreen.toggle()
}
}
// Video result, rendered with WakeVideoPlayer
case .video(let url, _):
VideoPlayerView(url: url, isPlaying: $isPlaying, player: $player)
WakeVideoPlayer(
url: url,
autoPlay: true,
isLooping: true,
showsControls: true,
allowFullscreen: true,
muteInitially: false,
videoGravity: .resizeAspect
)
.frame(width: UIScreen.main.bounds.width - 40)
.background(Color.clear)
.cornerRadius(10)
.clipped()
.onAppear {
isPlaying = true
}
.onDisappear {
isPlaying = false
player?.pause()
}
.onTapGesture {
withAnimation {
showControls.toggle()
}
}
.fullScreenCover(isPresented: $isFullscreen) {
FullscreenMediaView(media: media, isPresented: $isFullscreen, isPlaying: $isPlaying, player: player)
}
}
if let description = description, !description.isEmpty {
VStack(alignment: .leading, spacing: 2) {
Text("Description")
.font(Typography.font(for: .body, family: .quicksandBold))
.foregroundColor(.themeTextMessageMain)
// Text("Description")
// .font(Typography.font(for: .body, family: .quicksandBold))
// .foregroundColor(.themeTextMessageMain)
Text(description)
.font(.system(size: 12))
.foregroundColor(Color.themeTextMessageMain)
.fixedSize(horizontal: false, vertical: true)
}
.padding(.horizontal, 12)
.padding(.bottom, 12)
.padding(Theme.Spacing.lg)
}
}
.padding(.top, 8)
@ -134,12 +99,12 @@ struct BlindOutcomeView: View {
VStack {
Spacer()
Button(action: {
if case .video = media {
if showJoinModal {
withAnimation {
showIPListModal = true
}
} else {
Router.shared.navigate(to: .feedbackView)
onContinue()
}
}) {
Text("Continue")
@ -150,6 +115,10 @@ struct BlindOutcomeView: View {
.background(Color.themePrimary)
.cornerRadius(26)
}
//
.opacity(showIPListModal ? 0 : 1)
.animation(.easeInOut(duration: 0.2), value: showIPListModal)
.allowsHitTesting(!showIPListModal)
.padding(.horizontal)
}
.padding(.bottom, 20)
@ -157,197 +126,9 @@ struct BlindOutcomeView: View {
}
.navigationBarHidden(true)
.navigationBarBackButtonHidden(true)
.statusBar(hidden: isFullscreen)
.overlay(
JoinModal(isPresented: $showIPListModal)
JoinModal(isPresented: $showIPListModal, onClose: { onContinue() })
)
.onDisappear {
player?.pause()
player = nil
}
}
}
// MARK: - Fullscreen Media View
private struct FullscreenMediaView: View {
let media: MediaType
@Binding var isPresented: Bool
@Binding var isPlaying: Bool
@State private var showControls = true
private let player: AVPlayer?
init(media: MediaType, isPresented: Binding<Bool>, isPlaying: Binding<Bool>, player: AVPlayer?) {
self.media = media
self._isPresented = isPresented
self._isPlaying = isPlaying
self.player = player
}
var body: some View {
ZStack {
Color.black.edgesIgnoringSafeArea(.all)
ZStack {
switch media {
case .image(let uiImage):
Image(uiImage: uiImage)
.resizable()
.scaledToFit()
.frame(maxWidth: .infinity, maxHeight: .infinity)
.onTapGesture {
withAnimation {
showControls.toggle()
}
}
case .video(_, _):
if let player = player {
CustomVideoPlayer(player: player)
.onAppear {
player.play()
isPlaying = true
}
.onDisappear {
player.pause()
isPlaying = false
}
}
}
}
VStack {
HStack {
Button(action: { isPresented = false }) {
Image(systemName: "xmark")
.font(.title2)
.foregroundColor(.white)
.padding()
.background(Color.black.opacity(0.5))
.clipShape(Circle())
}
.padding()
Spacer()
}
Spacer()
}
}
.onDisappear {
player?.pause()
}
}
}
// MARK: - Video Player View
struct VideoPlayerView: UIViewRepresentable {
let url: URL
@Binding var isPlaying: Bool
@Binding var player: AVPlayer?
func makeUIView(context: Context) -> PlayerView {
let view = PlayerView()
let player = view.setupPlayer(url: url)
self.player = player
return view
}
func updateUIView(_ uiView: PlayerView, context: Context) {
if isPlaying {
uiView.play()
} else {
uiView.pause()
}
}
}
// MARK: - Custom Video Player
@available(iOS 14.0, *)
struct CustomVideoPlayer: UIViewControllerRepresentable {
let player: AVPlayer
func makeUIViewController(context: Context) -> AVPlayerViewController {
let controller = AVPlayerViewController()
controller.player = player
controller.showsPlaybackControls = false
controller.videoGravity = .resizeAspect
return controller
}
func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) {
uiViewController.player = player
}
}
// MARK: - Player View
class PlayerView: UIView {
private var player: AVPlayer?
private var playerLayer: AVPlayerLayer?
private var playerItem: AVPlayerItem?
private var playerItemObserver: NSKeyValueObservation?
@discardableResult
func setupPlayer(url: URL) -> AVPlayer {
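// Tear down any previous player before configuring a new one for this URL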
cleanup()
let asset = AVAsset(url: url)
let playerItem = AVPlayerItem(asset: asset)
self.playerItem = playerItem
player = AVPlayer(playerItem: playerItem)
let playerLayer = AVPlayerLayer(player: player)
playerLayer.videoGravity = .resizeAspect
layer.addSublayer(playerLayer)
self.playerLayer = playerLayer
playerLayer.frame = bounds
NotificationCenter.default.addObserver(
self,
selector: #selector(playerItemDidReachEnd),
name: .AVPlayerItemDidPlayToEndTime,
object: playerItem
)
return player!
}
func play() {
player?.play()
}
func pause() {
player?.pause()
}
private func cleanup() {
if let playerItem = playerItem {
NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playerItem)
}
player?.pause()
player?.replaceCurrentItem(with: nil)
player = nil
playerLayer?.removeFromSuperlayer()
playerLayer = nil
playerItem?.cancelPendingSeeks()
playerItem?.asset.cancelLoading()
playerItem = nil
}
@objc private func playerItemDidReachEnd() {
player?.seek(to: .zero)
player?.play()
}
override func layoutSubviews() {
super.layoutSubviews()
playerLayer?.frame = bounds
}
deinit {
cleanup()
}
}
@ -366,27 +147,47 @@ struct BlindOutcomeView_Previews: PreviewProvider {
}
}
private static func remoteImage(_ urlString: String, placeholder: UIColor = .systemPink, size: CGSize = CGSize(width: 300, height: 300)) -> UIImage {
if let url = URL(string: urlString),
let data = try? Data(contentsOf: url),
let image = UIImage(data: data) {
return image
}
return coloredImage(placeholder, size: size)
}
static var previews: some View {
Group {
// 1
BlindOutcomeView(
media: .image(coloredImage(.systemPink)),
time: "00:23",
media: .image(remoteImage("https://cdn.memorywake.com/files/7350515957925810176/original_1752499572813_screenshot-20250514-170854.png")),
title: "00:23",
description: "这是一段示例描述,用于在预览中验证样式与布局。",
isMember: false
isMember: false,
onContinue: {}
)
.previewDisplayName("Image • With Description • Guest")
// 2
BlindOutcomeView(
media: .image(coloredImage(.systemTeal)),
time: nil,
media: .image(remoteImage("https://cdn.memorywake.com/files/7350515957925810176/original_1752499572813_screenshot-20250514-170854.png")),
title: nil,
description: nil,
isMember: true
isMember: true,
onContinue: {}
)
.previewDisplayName("Image • Minimal • Member")
// 3
BlindOutcomeView(
media: .video(URL(string: "https://cdn.memorywake.com/users/7350439663116619888/files/7361241959983353857/7361241920703696897.mp4")!, nil),
title: "00:23",
description: "视频预览示例",
isMember: false,
onContinue: {}
)
.previewDisplayName("Video • With Description • Guest")
}
}
}
#endif

View File

@ -1,125 +0,0 @@
import SwiftUI
import UIKit
struct GIFView: UIViewRepresentable {
let name: String
var onTap: (() -> Void)? = nil
func makeUIView(context: Context) -> UIImageView {
let imageView = UIImageView()
// Load the GIF data from the main bundle
guard let url = Bundle.main.url(forResource: name, withExtension: "gif"),
let data = try? Data(contentsOf: url),
let image = UIImage.gif(data: data) else {
return imageView
}
imageView.image = image
imageView.contentMode = .scaleAspectFit
// Attach a tap gesture only when a handler is provided
if onTap != nil {
imageView.isUserInteractionEnabled = true
let tapGesture = UITapGestureRecognizer(target: context.coordinator, action: #selector(Coordinator.handleTap))
imageView.addGestureRecognizer(tapGesture)
}
return imageView
}
func updateUIView(_ uiView: UIImageView, context: Context) {}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
class Coordinator: NSObject {
var parent: GIFView
init(_ parent: GIFView) {
self.parent = parent
}
@objc func handleTap() {
parent.onTap?()
}
}
}
// UIImage extension for decoding animated GIF data
extension UIImage {
static func gif(data: Data) -> UIImage? {
guard let source = CGImageSourceCreateWithData(data as CFData, nil) else {
print("无法创建CGImageSource")
return nil
}
let count = CGImageSourceGetCount(source)
var images = [UIImage]()
var duration: TimeInterval = 0
for i in 0..<count {
guard let cgImage = CGImageSourceCreateImageAtIndex(source, i, nil) else {
continue
}
duration += UIImage.gifDelayForImageAtIndex(source: source, index: i)
images.append(UIImage(cgImage: cgImage, scale: UIScreen.main.scale, orientation: .up))
}
if count == 1 {
return images.first
} else {
return UIImage.animatedImage(with: images, duration: duration)
}
}
static func gifDelayForImageAtIndex(source: CGImageSource, index: Int) -> TimeInterval {
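// Read the per-frame delay from the GIF properties; very small delays are normalized to 0.1s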
var delay = 0.1
let cfProperties = CGImageSourceCopyPropertiesAtIndex(source, index, nil)
let properties = cfProperties as? [String: Any] ?? [:]
let gifProperties = properties[kCGImagePropertyGIFDictionary as String] as? [String: Any] ?? [:]
if let delayTime = gifProperties[kCGImagePropertyGIFUnclampedDelayTime as String] as? Double {
delay = delayTime
} else if let delayTime = gifProperties[kCGImagePropertyGIFDelayTime as String] as? Double {
delay = delayTime
}
if delay < 0.011 {
delay = 0.1
}
return delay
}
}
// Usage example: GIF with tap handling
struct GIFWithTapExample: View {
@State private var tapCount = 0
var body: some View {
VStack(spacing: 20) {
Text("点击GIF图片")
.font(.title)
GIFView(name: "Blind") {
// Navigate when the GIF is tapped
Router.shared.navigate(to: .blindBox(mediaType: .video))
}
.frame(width: 300, height: 300)
.border(Color.blue) // Debug border to visualize the frame
Text("点击次数: \(tapCount)")
.font(.subheadline)
}
}
}
struct GIFWithTapExample_Previews: PreviewProvider {
static var previews: some View {
GIFWithTapExample()
}
}

View File

@ -1,2 +1,76 @@
# SharedUI/Media
Shared media views: `GIFView.swift`, `SVGImage.swift`/`SVGImageHtml.swift`, etc.
Shared media views and components.
## WakeVideoPlayer
A SwiftUI video player component that follows the project's Theme style, built on top of `AVKit`.
Supported features:
- Play / pause
- Scrubbing via the progress slider (progress callbacks are not interrupted while dragging)
- Mute toggle
- Fullscreen playback (`fullScreenCover`)
- Auto-hiding controls (hidden after 2.5s of inactivity during playback)
- Autoplay and looping
- Configurable fill mode (`videoGravity`)
### Usage example
```swift
import SwiftUI
struct DemoVideoCard: View {
var body: some View {
WakeVideoPlayer(
url: URL(string: "https://devstreaming-cdn.apple.com/videos/wwdc/2020/10653/4/17B5F5F3-4D9E-4BAE-8E8F-2C3C7A01F3F2/cmaf.m3u8")!,
autoPlay: false,
isLooping: true,
showsControls: true,
allowFullscreen: true,
muteInitially: false,
videoGravity: .resizeAspectFill
)
.frame(height: 220)
.clipShape(RoundedRectangle(cornerRadius: Theme.CornerRadius.large, style: .continuous))
.shadow(color: Theme.Shadows.cardShadow.color, radius: Theme.Shadows.cardShadow.radius, x: Theme.Shadows.cardShadow.x, y: Theme.Shadows.cardShadow.y)
.padding()
}
}
```
### Initialization parameters
- `url: URL`: required. The video source; remote and local file URLs are supported.
- `autoPlay: Bool = true`: whether playback starts automatically on first appearance.
- `isLooping: Bool = false`: whether playback loops.
- `showsControls: Bool = true`: whether the custom control overlay is shown.
- `allowFullscreen: Bool = true`: whether fullscreen playback is allowed.
- `muteInitially: Bool = false`: whether playback starts muted.
- `videoGravity: AVLayerVideoGravity = .resizeAspectFill`: video fill mode, e.g. `.resizeAspect` / `.resizeAspectFill`.
- `fallbackURL: URL? = nil`: fallback stream URL (H.264/HLS recommended). When the source is HEVC and the current environment cannot hardware-decode it (e.g. the simulator), this URL is played instead.
### Notes
- If this is a newly added file, make sure `WakeVideoPlayer.swift` is added to the appropriate target in Xcode, otherwise it will not compile.
- Remote stream URLs must allow cross-origin access and use HTTPS; the example uses a public Apple HLS resource.
- Advanced features such as Picture in Picture (PiP), double-tap seeking, or gesture-based brightness/volume control can be added on top of this component.
### HEVC/H.265 support and fallback strategy
- The simulator usually cannot hardware-decode HEVC (symptom: audio plays but no picture). Real devices (A9 and later) generally can.
- The component analyzes the asset's track codecs asynchronously on load; if HEVC is detected and the current environment cannot hardware-decode it:
  - If a `fallbackURL` is provided (H.264 or multi-bitrate HLS recommended), playback automatically switches to that source.
  - If no `fallbackURL` is provided, a yellow banner is shown at the top; test on a real device or provide a fallback stream.
Example:
```swift
WakeVideoPlayer(
url: URL(string: "https://example.com/video_h265.mp4")!,
fallbackURL: URL(string: "https://example.com/video_h264.m3u8")!,
autoPlay: true,
isLooping: false,
showsControls: true,
allowFullscreen: true,
muteInitially: false,
videoGravity: .resizeAspect
)
.frame(height: 220)
```
Prefer HLS (`.m3u8`): a master playlist containing multiple codecs/bitrates offers the best compatibility.

View File

@ -1,16 +0,0 @@
import SwiftUI
// Deprecated: SVG runtime rendering removed. This view is a no-op placeholder to keep API compatibility.
struct SVGImage: View {
let svgName: String
var contentMode: ContentMode = .fit
var tintColor: Color?
var body: some View {
Color.clear
}
enum ContentMode {
case fit
case fill
}
}

View File

@ -1,8 +0,0 @@
import SwiftUI
// Deprecated: SVG runtime rendering removed. This view is a no-op placeholder.
struct SVGImageHtml: View {
let svgName: String
var body: some View {
Color.clear
}
}

View File

@ -0,0 +1,582 @@
//
// WakeVideoPlayer.swift
// wake
//
// Created by Cascade on 2025/9/12.
//
import SwiftUI
import AVKit
import VideoToolbox
/// A SwiftUI video player that follows the project's Theme style, built on AVKit.
/// Supports play/pause, scrubbing, mute, fullscreen, auto-hiding controls, looping, and an HEVC fallback stream.
public struct WakeVideoPlayer: View {
// MARK: - Public Config
private let url: URL
private let autoPlay: Bool
private let isLooping: Bool
private let showsControls: Bool
private let allowFullscreen: Bool
private let muteInitially: Bool
private let videoGravity: AVLayerVideoGravity
private let fallbackURL: URL?
// MARK: - Internal State
@State private var player: AVPlayer = AVPlayer()
@State private var isPlaying: Bool = false
@State private var isMuted: Bool = false
@State private var duration: Double = 0
@State private var currentTime: Double = 0
@State private var isScrubbing: Bool = false
@State private var isControlsVisible: Bool = true
@State private var isFullscreen: Bool = false
@State private var warningMessage: String?
@State private var timeObserverToken: Any?
@State private var endObserver: Any?
// Work item used to auto-hide the controls after a period of inactivity
@State private var autoHideWorkItem: DispatchWorkItem?
public init(
url: URL,
autoPlay: Bool = true,
isLooping: Bool = false,
showsControls: Bool = true,
allowFullscreen: Bool = true,
muteInitially: Bool = false,
videoGravity: AVLayerVideoGravity = .resizeAspectFill,
fallbackURL: URL? = nil
) {
self.url = url
self.autoPlay = autoPlay
self.isLooping = isLooping
self.showsControls = showsControls
self.allowFullscreen = allowFullscreen
self.muteInitially = muteInitially
self.videoGravity = videoGravity
self.fallbackURL = fallbackURL
}
public var body: some View {
ZStack {
VideoPlayerRepresentable(player: player, videoGravity: videoGravity)
.background(Color.black)
.onTapGesture { toggleControls() }
if showsControls && isControlsVisible {
controlsOverlay
.transition(.opacity)
}
}
.onAppear(perform: setup)
.onDisappear(perform: cleanup)
.fullScreenCover(isPresented: $isFullscreen) {
FullscreenContainer(
player: player,
isPlaying: $isPlaying,
isMuted: $isMuted,
duration: $duration,
currentTime: $currentTime,
isScrubbing: $isScrubbing,
onTogglePlay: togglePlay,
onSeek: seek(to:),
onMute: toggleMute,
onDismiss: { isFullscreen = false },
videoGravity: videoGravity
)
}
}
}
// MARK: - UI
private extension WakeVideoPlayer {
var controlsOverlay: some View {
VStack(spacing: 0) {
// Top gradient (improves contrast for the warning banner and top controls)
LinearGradient(colors: [Color.black.opacity(0.35), .clear], startPoint: .top, endPoint: .bottom)
.frame(height: 80)
.frame(maxWidth: .infinity)
.allowsHitTesting(false)
if let warningMessage {
HStack(spacing: 8) {
Image(systemName: "exclamationmark.triangle.fill")
.foregroundColor(.black)
Text(warningMessage)
.font(.caption2)
.foregroundColor(.black)
.lineLimit(3)
.multilineTextAlignment(.leading)
}
.padding(8)
.background(Theme.Colors.warning.opacity(0.95))
.cornerRadius(8)
.padding(.horizontal, Theme.Spacing.lg)
.padding(.top, Theme.Spacing.md)
}
Spacer()
// Center play/pause button for quick access
Button(action: togglePlay) {
Image(systemName: isPlaying ? "pause.circle.fill" : "play.circle.fill")
.resizable()
.frame(width: 64, height: 64)
.foregroundColor(.white)
.shadow(color: Theme.Shadows.large, radius: 12, x: 0, y: 8)
}
.padding(.bottom, Theme.Spacing.lg)
// Bottom bar controls
VStack(spacing: Theme.Spacing.sm) {
HStack {
Text(formatTime(currentTime))
.font(.caption)
.foregroundColor(.white.opacity(0.85))
.frame(width: 46, alignment: .leading)
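// Scrubber: updates currentTime locally while dragging and seeks when the drag ends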
Slider(value: Binding(
get: { currentTime },
set: { newVal in
currentTime = min(max(0, newVal), duration)
}
), in: 0...max(duration, 0.01), onEditingChanged: { editing in
isScrubbing = editing
if !editing { seek(to: currentTime) }
})
.tint(Theme.Colors.primary)
Text(formatTime(duration))
.font(.caption)
.foregroundColor(.white.opacity(0.85))
.frame(width: 46, alignment: .trailing)
}
HStack(spacing: Theme.Spacing.lg) {
Button(action: toggleMute) {
Image(systemName: isMuted ? "speaker.slash.fill" : "speaker.wave.2.fill")
.font(.system(size: 16, weight: .semibold))
.foregroundColor(.white)
}
Spacer()
if allowFullscreen {
Button(action: { isFullscreen = true }) {
Image(systemName: "arrow.up.left.and.down.right.magnifyingglass")
.font(.system(size: 16, weight: .semibold))
.foregroundColor(.white)
}
}
}
}
.padding(.horizontal, Theme.Spacing.lg)
.padding(.top, Theme.Spacing.sm)
.padding(.bottom, Theme.Spacing.lg + 4)
.background(
LinearGradient(colors: [Color.black.opacity(0.0), Color.black.opacity(0.55)], startPoint: .top, endPoint: .bottom)
.edgesIgnoringSafeArea(.bottom)
)
}
.onAppear { scheduleAutoHideIfNeeded() }
.onChange(of: isPlaying) { _, _ in scheduleAutoHideIfNeeded() }
}
}
// MARK: - Lifecycle & Player Setup
private extension WakeVideoPlayer {
func setup() {
// Analyze the asset first so an H.264 fallback can be used when HEVC cannot be hardware-decoded
Task { @MainActor in
let srcURL = url
let asset = AVURLAsset(url: srcURL)
do {
let (hasVideo, hasHEVC) = try await analyzeAsset(asset)
// HEVC detected but hardware decoding is unavailable (typical in the simulator)
if hasVideo && hasHEVC && !isHEVCHardwareDecodeSupported() {
warningMessage = "当前运行环境不支持 HEVC 硬解码(模拟器常见)。已尝试使用备用码流。"
}
if hasVideo && hasHEVC && !isHEVCHardwareDecodeSupported(), let fallback = fallbackURL {
prepare(with: fallback)
} else {
prepare(with: srcURL)
}
} catch {
// If track analysis fails, just play the original URL
prepare(with: srcURL)
}
}
}
func prepare(with sourceURL: URL) {
// Remove any existing observers before swapping in a new player item
if let token = timeObserverToken {
player.removeTimeObserver(token)
timeObserverToken = nil
}
if let endObs = endObserver {
NotificationCenter.default.removeObserver(endObs)
endObserver = nil
}
let item = AVPlayerItem(url: sourceURL)
player.replaceCurrentItem(with: item)
player.isMuted = muteInitially
isMuted = muteInitially
player.automaticallyWaitsToMinimizeStalling = true
player.allowsExternalPlayback = false
// Best-effort duration read from the asset; refined later by the periodic time observer
let cmDuration = item.asset.duration.secondsNonNaN
if cmDuration.isFinite { duration = cmDuration }
// Observe playback progress
addTimeObserver()
// Loop by seeking back to the start when playback ends
if isLooping {
endObserver = NotificationCenter.default.addObserver(
forName: .AVPlayerItemDidPlayToEndTime,
object: item,
queue: .main
) { _ in
player.seek(to: .zero)
if autoPlay { player.play() }
}
}
// Start playback if requested
if autoPlay {
player.play()
isPlaying = true
scheduleAutoHideIfNeeded()
}
}
func cleanup() {
if let token = timeObserverToken {
player.removeTimeObserver(token)
timeObserverToken = nil
}
if let endObs = endObserver {
NotificationCenter.default.removeObserver(endObs)
endObserver = nil
}
autoHideWorkItem?.cancel()
autoHideWorkItem = nil
player.pause()
}
func addTimeObserver() {
// Report progress every 0.5s
let interval = CMTime(seconds: 0.5, preferredTimescale: CMTimeScale(NSEC_PER_SEC))
timeObserverToken = player.addPeriodicTimeObserver(forInterval: interval, queue: .main) { time in
guard !isScrubbing else { return }
currentTime = time.secondsNonNaN
if duration <= 0 {
if let cm = player.currentItem?.duration {
let total = cm.secondsNonNaN
if total.isFinite { duration = total }
}
}
}
}
}
// MARK: - Actions
private extension WakeVideoPlayer {
func togglePlay() {
if isPlaying {
player.pause()
isPlaying = false
showControls()
} else {
player.play()
isPlaying = true
scheduleAutoHideIfNeeded()
}
}
func toggleMute() {
isMuted.toggle()
player.isMuted = isMuted
showControls()
scheduleAutoHideIfNeeded()
}
func seek(to seconds: Double) {
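// Clamp to the playable range and seek with zero tolerance for precise scrubbing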
let clamped = min(max(0, seconds), max(duration, 0))
let time = CMTime(seconds: clamped, preferredTimescale: 600)
player.seek(to: time, toleranceBefore: .zero, toleranceAfter: .zero)
if isPlaying { scheduleAutoHideIfNeeded() }
}
func toggleControls() {
withAnimation(.easeInOut(duration: 0.2)) {
isControlsVisible.toggle()
}
if isControlsVisible { scheduleAutoHideIfNeeded() }
}
func showControls() {
withAnimation(.easeInOut(duration: 0.2)) { isControlsVisible = true }
}
func scheduleAutoHideIfNeeded() {
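// Cancel any pending auto-hide and schedule a new one only while playing with controls enabled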
autoHideWorkItem?.cancel()
guard showsControls && isPlaying else { return }
let work = DispatchWorkItem {
withAnimation(.easeOut(duration: 0.25)) { isControlsVisible = false }
}
autoHideWorkItem = work
DispatchQueue.main.asyncAfter(deadline: .now() + 2.5, execute: work)
}
}
// MARK: - Helpers
private extension WakeVideoPlayer {
func isHEVCHardwareDecodeSupported() -> Bool {
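// The simulator cannot hardware-decode HEVC; on device, ask VideoToolbox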
#if targetEnvironment(simulator)
return false
#else
return VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC)
#endif
}
func analyzeAsset(_ asset: AVAsset) async throws -> (hasVideo: Bool, hasHEVC: Bool) {
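// Load the tracks asynchronously and check whether any video track uses the HEVC codec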
let tracks = try await asset.load(.tracks)
var hasVideo = false
var hasHEVC = false
for track in tracks {
// mediaType can be read synchronously
if track.mediaType == .video {
hasVideo = true
let fds: [CMFormatDescription] = try await track.load(.formatDescriptions)
for desc in fds {
let subtype = CMFormatDescriptionGetMediaSubType(desc)
if subtype == kCMVideoCodecType_HEVC { hasHEVC = true }
}
}
}
return (hasVideo, hasHEVC)
}
func formatTime(_ seconds: Double) -> String {
guard seconds.isFinite && !seconds.isNaN else { return "00:00" }
let total = Int(seconds)
let h = total / 3600
let m = (total % 3600) / 60
let s = (total % 60)
if h > 0 {
return String(format: "%02d:%02d:%02d", h, m, s)
} else {
return String(format: "%02d:%02d", m, s)
}
}
}
// MARK: - Representable: videoGravity
private struct VideoPlayerRepresentable: UIViewControllerRepresentable {
let player: AVPlayer
let videoGravity: AVLayerVideoGravity
func makeUIViewController(context: Context) -> AVPlayerViewController {
let vc = AVPlayerViewController()
vc.player = player
vc.showsPlaybackControls = false
vc.videoGravity = videoGravity
return vc
}
func updateUIViewController(_ uiViewController: AVPlayerViewController, context: Context) {
uiViewController.player = player
uiViewController.videoGravity = videoGravity
}
}
// MARK: - Fullscreen Container
private struct FullscreenContainer: View {
let player: AVPlayer
@Binding var isPlaying: Bool
@Binding var isMuted: Bool
@Binding var duration: Double
@Binding var currentTime: Double
@Binding var isScrubbing: Bool
let onTogglePlay: () -> Void
let onSeek: (Double) -> Void
let onMute: () -> Void
let onDismiss: () -> Void
let videoGravity: AVLayerVideoGravity
@State private var isControlsVisible: Bool = true
@State private var autoHideWorkItem: DispatchWorkItem?
var body: some View {
ZStack {
VideoPlayerRepresentable(player: player, videoGravity: videoGravity)
.ignoresSafeArea()
.background(Color.black)
.onTapGesture { toggleControls() }
if isControlsVisible {
VStack(spacing: 0) {
HStack {
Button(action: onDismiss) {
Image(systemName: "xmark.circle.fill")
.font(.system(size: 24, weight: .semibold))
.foregroundColor(.white)
.shadow(color: Theme.Shadows.large, radius: 12, x: 0, y: 8)
}
Spacer()
}
.padding(.horizontal, Theme.Spacing.lg)
.padding(.top, Theme.Spacing.lg)
.padding(.bottom, Theme.Spacing.md)
.background(
LinearGradient(colors: [Color.black.opacity(0.55), .clear], startPoint: .top, endPoint: .bottom)
.ignoresSafeArea(edges: .top)
)
Spacer()
VStack(spacing: Theme.Spacing.sm) {
HStack {
Text(formatTime(currentTime))
.font(.caption)
.foregroundColor(.white.opacity(0.85))
.frame(width: 46, alignment: .leading)
Slider(value: Binding(
get: { currentTime },
set: { newVal in
currentTime = min(max(0, newVal), duration)
}
), in: 0...max(duration, 0.01), onEditingChanged: { editing in
isScrubbing = editing
if !editing { onSeek(currentTime) }
})
.tint(Theme.Colors.primary)
Text(formatTime(duration))
.font(.caption)
.foregroundColor(.white.opacity(0.85))
.frame(width: 46, alignment: .trailing)
}
HStack(spacing: Theme.Spacing.lg) {
Button(action: onTogglePlay) {
Image(systemName: isPlaying ? "pause.fill" : "play.fill")
.font(.system(size: 16, weight: .semibold))
.foregroundColor(.white)
}
Button(action: onMute) {
Image(systemName: isMuted ? "speaker.slash.fill" : "speaker.wave.2.fill")
.font(.system(size: 16, weight: .semibold))
.foregroundColor(.white)
}
Spacer()
}
}
.padding(.horizontal, Theme.Spacing.lg)
.padding(.top, Theme.Spacing.sm)
.padding(.bottom, Theme.Spacing.lg + 4)
.background(
LinearGradient(colors: [Color.black.opacity(0.0), Color.black.opacity(0.7)], startPoint: .top, endPoint: .bottom)
.ignoresSafeArea(edges: .bottom)
)
}
.transition(.opacity)
}
}
.onAppear { scheduleAutoHideIfNeeded() }
.onChange(of: isPlaying) { _, _ in scheduleAutoHideIfNeeded() }
}
func toggleControls() {
withAnimation(.easeInOut(duration: 0.2)) {
isControlsVisible.toggle()
}
if isControlsVisible { scheduleAutoHideIfNeeded() }
}
func scheduleAutoHideIfNeeded() {
autoHideWorkItem?.cancel()
guard isPlaying else { return }
let work = DispatchWorkItem {
withAnimation(.easeOut(duration: 0.25)) { isControlsVisible = false }
}
autoHideWorkItem = work
DispatchQueue.main.asyncAfter(deadline: .now() + 2.5, execute: work)
}
func formatTime(_ seconds: Double) -> String {
guard seconds.isFinite && !seconds.isNaN else { return "00:00" }
let total = Int(seconds)
let h = total / 3600
let m = (total % 3600) / 60
let s = (total % 60)
if h > 0 {
return String(format: "%02d:%02d:%02d", h, m, s)
} else {
return String(format: "%02d:%02d", m, s)
}
}
}
// MARK: - CMTime helpers
private extension CMTime {
var secondsNonNaN: Double {
let s = CMTimeGetSeconds(self)
if s.isNaN || s.isInfinite { return 0 }
return s
}
}
// MARK: - Preview
#Preview("WakeVideoPlayer - Basic") {
VStack(spacing: 16) {
Text("WakeVideoPlayer 预览")
.font(.headline)
WakeVideoPlayer(
url: URL(string: "https://cdn.memorywake.com/users/7350439663116619888/files/7361241959983353857/7361241920703696897.mp4")!,
autoPlay: false,
isLooping: true,
showsControls: true,
allowFullscreen: true,
muteInitially: true,
videoGravity: .resizeAspectFill
)
.frame(height: 220)
.clipShape(RoundedRectangle(cornerRadius: Theme.CornerRadius.large, style: .continuous))
.shadow(color: Theme.Shadows.cardShadow.color, radius: Theme.Shadows.cardShadow.radius, x: Theme.Shadows.cardShadow.x, y: Theme.Shadows.cardShadow.y)
.padding()
}
.background(Theme.Colors.background)
}
#Preview("WakeVideoPlayer - HLS (Primary)") {
VStack(spacing: 16) {
Text("WakeVideoPlayer HLS 主播放 预览")
.font(.headline)
WakeVideoPlayer(
url: URL(string: "https://test-streams.mux.dev/x36xhzz/x36xhzz.m3u8")!,
autoPlay: false,
isLooping: true,
showsControls: true,
allowFullscreen: true,
muteInitially: true,
videoGravity: .resizeAspect
)
.frame(height: 220)
.clipShape(RoundedRectangle(cornerRadius: Theme.CornerRadius.large, style: .continuous))
.shadow(color: Theme.Shadows.cardShadow.color, radius: Theme.Shadows.cardShadow.radius, x: Theme.Shadows.cardShadow.x, y: Theme.Shadows.cardShadow.y)
.padding()
}
.background(Theme.Colors.background)
}

View File

@ -98,7 +98,7 @@ struct UserProfileModal: View {
}
}
.onTapGesture {
Router.shared.navigate(to: .userInfo)
Router.shared.navigate(to: .userInfo(createFirstBlindBox: false))
}
} else {
Image(systemName: "person.circle.fill")

View File

@ -264,7 +264,7 @@ struct LoginView: View {
self.showError = true
}
// Navigate to the userinfo screen
Router.shared.navigate(to: .userInfo)
Router.shared.navigate(to: .userInfo(createFirstBlindBox: true))
case .failure(let error):
print("❌ [15] 后端认证失败")

View File

@ -26,6 +26,8 @@ struct MediaUploadView: View {
@State private var uploadComplete = false
/// File IDs returned for the uploaded media
@State private var uploadedFileIds: [[String: String]] = []
/// Whether the Continue action is currently submitting
@State private var isSubmitting: Bool = false
// MARK: -
@ -145,18 +147,32 @@ struct MediaUploadView: View {
/// Continue button; reflects uploading and submitting states
private var continueButton: some View {
Button(action: handleContinue) {
Text("Continue")
let uploading = isUploading()
let hasSelection = !uploadManager.selectedMedia.isEmpty
let canProceed = uploadManager.isAllUploaded
let isLoading = uploading || isSubmitting
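// Show a spinner while uploading or submitting; the button stays disabled until every item has uploaded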
return Button(action: handleContinue) {
HStack(spacing: 8) {
if isLoading {
ProgressView()
.progressViewStyle(CircularProgressViewStyle())
}
Text(uploading ? "Uploading..." : (isSubmitting ? "Processing..." : "Continue"))
.font(.headline)
.foregroundColor(uploadManager.selectedMedia.isEmpty ? Color.themeTextMessage : Color.themeTextMessageMain)
}
.foregroundColor(hasSelection ? Color.themeTextMessageMain : Color.themeTextMessage)
.frame(maxWidth: .infinity)
.frame(height: 56)
.background(uploadManager.selectedMedia.isEmpty ? Color.white : Color.themePrimary)
.background(
hasSelection
? (isSubmitting ? Color.gray.opacity(0.3) : Color.themePrimary)
: Color.white
)
.cornerRadius(28)
.padding(.horizontal, 24)
}
.buttonStyle(PlainButtonStyle())
.disabled(uploadManager.selectedMedia.isEmpty)
.disabled(!canProceed || isSubmitting)
}
///
@ -315,6 +331,9 @@ struct MediaUploadView: View {
/// Handles the Continue tap
private func handleContinue() {
// Ignore repeated taps while a submission is in flight
guard !isSubmitting else { return }
// Require at least one successful upload result
let uploadResults = uploadManager.uploadResults
guard !uploadResults.isEmpty else {
@ -332,6 +351,8 @@ struct MediaUploadView: View {
// Submit the uploaded file IDs as materials
Task {
isSubmitting = true
defer { isSubmitting = false }
do {
let materialIds = try await MaterialUpload.shared.addMaterials(files: files)
print("🚀 素材ID: \(materialIds ?? [])")

View File

@ -1,6 +1,8 @@
import SwiftUI
struct UserInfo: View {
let createFirstBlindBox: Bool
@Environment(\.dismiss) private var dismiss
@StateObject private var router = Router.shared
@ -44,9 +46,10 @@ struct UserInfo: View {
.map { _ in false })
.receive(on: RunLoop.main)
init() {
init(createFirstBlindBox: Bool) {
// Warm up the keyboard preloader
_ = UserInfo.keyboardPreloader
self.createFirstBlindBox = createFirstBlindBox
}
var body: some View {
@ -166,7 +169,7 @@ struct UserInfo: View {
Button(action: {
if showUsername {
let parameters: [String: Any] = [
"username": userName,
"user_name": userName,
"avatar_file_id": uploadedFileId ?? ""
]
@ -182,6 +185,7 @@ struct UserInfo: View {
self.userName = userData.username
}
if createFirstBlindBox {
// Use the uploaded file to create the user's first blind box material
MaterialUpload.shared.addMaterial(
fileId: uploadedFileId ?? "",
@ -208,6 +212,9 @@ struct UserInfo: View {
print("素材添加失败: \(error.localizedDescription)")
}
}
} else {
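// No first blind box to create: go straight to the blind box screen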
Router.shared.navigate(to: .blindBox(mediaType: .all))
}
case .failure(let error):
print("❌ 用户信息更新失败: \(error.localizedDescription)")
self.errorMessage = "更新失败: \(error.localizedDescription)"
@ -327,6 +334,6 @@ struct SettingsRow: View {
// MARK: - Preview
struct UserInfo_Previews: PreviewProvider {
static var previews: some View {
UserInfo()
UserInfo(createFirstBlindBox: false)
}
}

View File

@ -2,6 +2,7 @@ import SwiftUI
struct JoinModal: View {
@Binding var isPresented: Bool
let onClose: () -> Void
var body: some View {
ZStack(alignment: .bottom) {
@ -26,7 +27,7 @@ struct JoinModal: View {
Spacer()
Button(action: {
withAnimation {
Router.shared.navigate(to: .blindBox(mediaType: .all))
onClose()
}
}) {
Image(systemName: "xmark")
@ -246,6 +247,6 @@ private struct JoinListMark: View {
struct JoinModal_Previews: PreviewProvider {
static var previews: some View {
JoinModal(isPresented: .constant(true))
JoinModal(isPresented: .constant(true), onClose: {})
}
}