// wake-ios/wake/View/Memories/MemoriesView.swift

import SwiftUI
import AVKit

// MARK: - API Response Models

struct MaterialResponse: Decodable {
    let code: Int
    let data: MaterialData

    struct MaterialData: Decodable {
        let items: [MemoryItem]
    }
}
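
// A sketch of the `/material/list` payload these models are written to decode;
// the field names mirror the CodingKeys below, and the values are purely illustrative:
//
//  {
//    "code": 0,
//    "data": {
//      "items": [
//        {
//          "id": "abc123",
//          "name": "Beach trip",
//          "description": "Summer 2024",
//          "file_info": {
//            "id": "f-001",
//            "file_name": "beach.jpg",
//            "url": "https://example.com/beach.jpg"
//          }
//        }
//      ]
//    }
//  }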

struct MemoryItem: Identifiable, Decodable {
    let id: String
    let name: String
    let description: String
    let fileInfo: FileInfo

    var title: String { name }
    var subtitle: String { description }
    var mediaType: MemoryMediaType { .image(fileInfo.url) }
    var aspectRatio: CGFloat { 1.0 } // Default to square; adjust based on actual image dimensions if needed

    enum CodingKeys: String, CodingKey {
        case id, name, description
        case fileInfo = "file_info"
    }
}

struct FileInfo: Decodable {
    let id: String
    let fileName: String
    let url: String

    enum CodingKeys: String, CodingKey {
        case id
        case fileName = "file_name"
        case url
    }
}

enum MemoryMediaType: Equatable {
    case image(String)
    case video(String)
}
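
// With the current payload `mediaType` always resolves to `.image(fileInfo.url)`, so the
// `.video` branch in `MemoryCard` below is never reached. A minimal sketch of deriving the
// type from the file name instead (the helper name and extension list are assumptions,
// not part of the existing API):
extension MemoryItem {
    var inferredMediaType: MemoryMediaType {
        let ext = fileInfo.fileName.split(separator: ".").last?.lowercased() ?? ""
        return ["mp4", "mov", "m4v"].contains(ext) ? .video(fileInfo.url) : .image(fileInfo.url)
    }
}

// MARK: - Memories Grid

/// Two-column grid of the user's memories, fetched from `/material/list` when the view appears.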
struct MemoriesView: View {
    @State private var memories: [MemoryItem] = []
    @State private var isLoading = false
    @State private var errorMessage: String?

    let columns = [
        GridItem(.flexible(), spacing: 1),
        GridItem(.flexible(), spacing: 1)
    ]

    var body: some View {
        NavigationView {
            Group {
                if isLoading {
                    ProgressView()
                        .scaleEffect(1.5)
                } else if let error = errorMessage {
                    Text("Error: \(error)")
                        .foregroundColor(.red)
                } else {
                    ScrollView {
                        LazyVGrid(columns: columns, spacing: 4) {
                            ForEach(memories) { memory in
                                MemoryCard(memory: memory)
                                    .padding(.horizontal, 2)
                            }
                        }
                        .padding(.top, 4)
                        .padding(.horizontal, 4)
                    }
                }
            }
            .navigationTitle("My Memories")
            .navigationBarTitleDisplayMode(.inline)
            .onAppear {
                fetchMemories()
            }
        }
    }
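
    /// Fetches the memory list from the backend and publishes it to the grid;
    /// failures surface through `errorMessage`. `NetworkService.shared.get` is
    /// assumed to decode the response into the `Result` type it is called with.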
    private func fetchMemories() {
        isLoading = true
        errorMessage = nil

        NetworkService.shared.get(path: "/material/list") { [self] (result: Result<MaterialResponse, NetworkError>) in
            DispatchQueue.main.async { [self] in
                self.isLoading = false
                switch result {
                case .success(let response):
                    print("✅ Successfully fetched \(response.data.items.count) memory items")
                    response.data.items.forEach { item in
                        print("📝 Item ID: \(item.id), Title: \(item.name), URL: \(item.fileInfo.url)")
                    }
                    self.memories = response.data.items
                case .failure(let error):
                    self.errorMessage = error.localizedDescription
                    print("❌ Failed to fetch memories: \(error.localizedDescription)")
                }
            }
        }
    }
}
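
// MARK: - Memory Card

/// A single grid cell: the memory's image or video thumbnail with its title and subtitle.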
struct MemoryCard: View {
    let memory: MemoryItem

    var body: some View {
        VStack(alignment: .leading, spacing: 4) {
            ZStack {
                // Media content
                Group {
                    switch memory.mediaType {
                    case .image(let urlString):
                        if let url = URL(string: urlString) {
                            AsyncImage(url: url) { phase in
                                if let image = phase.image {
                                    image
                                        .resizable()
                                        .aspectRatio(contentMode: .fill)
                                } else if phase.error != nil {
                                    Color.gray.opacity(0.3)
                                } else {
                                    ProgressView()
                                }
                            }
                        } else {
                            Color.gray.opacity(0.3)
                                .aspectRatio(memory.aspectRatio, contentMode: .fill)
                        }
                    case .video(let urlString):
                        if let url = URL(string: urlString) {
                            // The video is shown with a play button overlay and only plays when tapped.
                            VideoPlayer(player: AVPlayer(url: url))
                                .aspectRatio(memory.aspectRatio, contentMode: .fill)
                        } else {
                            Color.gray.opacity(0.3)
                                .aspectRatio(memory.aspectRatio, contentMode: .fill)
                        }
                    }
                }
                .frame(width: (UIScreen.main.bounds.width / 2) - 24,
                       height: (UIScreen.main.bounds.width / 2 - 24) * (1 / memory.aspectRatio))
                .clipped()
                .cornerRadius(12)
                .overlay(
                    Group {
                        if case .video = memory.mediaType {
                            Image(systemName: "play.circle.fill")
                                .font(.system(size: 40))
                                .foregroundColor(.white.opacity(0.9))
                        }
                    }
                )
            }

            // Title and Subtitle
            VStack(alignment: .leading, spacing: 1) {
                Text(memory.title)
                    .font(.subheadline)
                    .lineLimit(1)
                Text(memory.subtitle)
                    .font(.caption)
                    .foregroundColor(.secondary)
                    .lineLimit(1)
            }
            .padding(.horizontal, 2)
            .padding(.bottom, 4)
        }
    }
}

// Helper extension to pause video
private extension AVPlayer {
    /// Pauses playback only if the player is currently playing.
    func pauseIfPlaying() {
        if timeControlStatus == .playing {
            pause()
        }
    }
}

#Preview {
    MemoriesView()
}