Commit f18e511d authored by CZ1004's avatar CZ1004

Merge branch 'Advertisement' of…

Merge branch 'Advertisement' of http://gitlab.zhangxindiet.com/ShuMing/phonemanager into Advertisement

* 'Advertisement' of http://gitlab.zhangxindiet.com/ShuMing/phonemanager:
  引入新相册资源管理类
  垃圾桶数据库
  引导页动画
  垃圾桶页面

# Conflicts:
#	PhoneManager/Class/Page/Home/View/DateSelectButtonView.swift
#	PhoneManager/Class/Page/Home/View/ResourceFilterBoxView.swift
#	PhoneManager/Class/Page/Home/View/YearMonthPickerView.swift
#	PhoneManager/Class/Page/Home/View/cell/ResourceFilterBoxTableViewCell.swift
parents 36d4c4e3 21848fa1
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"filename" : "Frame@2x.png",
"idiom" : "universal",
"scale" : "2x"
},
{
"filename" : "Frame@3x.png",
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
{
"images" : [
{
"idiom" : "universal",
"scale" : "1x"
},
{
"filename" : "Frame@2x.png",
"idiom" : "universal",
"scale" : "2x"
},
{
"filename" : "Frame@3x.png",
"idiom" : "universal",
"scale" : "3x"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}
//
// TrashDatabase.swift
// PhoneManager
//
// Created by edy on 2025/5/12.
//
import Foundation
import SQLite3
/// Thin SQLite wrapper persisting "trash bin" records (assets queued for deletion).
/// Accessed via the `shared` singleton; the database file lives at Documents/trash.sqlite.
class TrashDatabase {
    static let shared = TrashDatabase()

    private var db: OpaquePointer?
    private let dbPath: String

    /// SQLite destructor constant (SQLITE_TRANSIENT) telling sqlite3_bind_text
    /// to copy the buffer immediately.
    /// BUG FIX: the original passed `nil` (SQLITE_STATIC), which promises the
    /// pointer stays valid until the statement runs — not guaranteed for the
    /// autoreleased buffer returned by `NSString.utf8String`.
    private static let transientDestructor = unsafeBitCast(-1, to: sqlite3_destructor_type.self)

    private init() {
        let fileURL = try! FileManager.default
            .url(for: .documentDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
            .appendingPathComponent("trash.sqlite")
        dbPath = fileURL.path
        if sqlite3_open(dbPath, &db) != SQLITE_OK {
            print("无法打开数据库")
            return
        }
        createTable()
    }

    /// Creates the `trash` table on first launch (no-op afterwards).
    private func createTable() {
        let createTableString = """
        CREATE TABLE IF NOT EXISTS trash(
        localIdentifier TEXT PRIMARY KEY,
        assetSize DOUBLE,
        createDate DOUBLE,
        mediaType INTEGER
        );
        """
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        if sqlite3_prepare_v2(db, createTableString, -1, &statement, nil) == SQLITE_OK {
            if sqlite3_step(statement) == SQLITE_DONE {
                print("成功创建trash表")
            } else {
                print("创建表失败")
            }
        } else {
            print("创建表语句准备失败")
        }
    }

    /// Inserts a record, or updates it when the identifier already exists (upsert).
    /// - Returns: true on success.
    func insert(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int) -> Bool {
        // De-duplication: fall back to UPDATE when the row already exists.
        if query(localIdentifier: localIdentifier) != nil {
            return update(localIdentifier: localIdentifier, assetSize: assetSize, createDate: createDate, mediaType: mediaType)
        }
        let sql = "INSERT INTO trash (localIdentifier, assetSize, createDate, mediaType) VALUES (?, ?, ?, ?);"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return false }
        sqlite3_bind_text(statement, 1, (localIdentifier as NSString).utf8String, -1, Self.transientDestructor)
        sqlite3_bind_double(statement, 2, assetSize)
        sqlite3_bind_double(statement, 3, createDate.timeIntervalSince1970)
        sqlite3_bind_int(statement, 4, Int32(mediaType))
        if sqlite3_step(statement) == SQLITE_DONE {
            print("成功插入数据")
            return true
        }
        print("插入数据失败")
        return false
    }

    /// Deletes the record with the given identifier.
    /// - Returns: true when the DELETE statement ran to completion.
    func delete(localIdentifier: String) -> Bool {
        let sql = "DELETE FROM trash WHERE localIdentifier = ?;"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return false }
        sqlite3_bind_text(statement, 1, (localIdentifier as NSString).utf8String, -1, Self.transientDestructor)
        if sqlite3_step(statement) == SQLITE_DONE {
            print("成功删除数据")
            return true
        }
        print("删除数据失败")
        return false
    }

    /// Updates an existing record's size, creation date and media type.
    /// - Returns: true when the UPDATE statement ran to completion.
    func update(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int) -> Bool {
        let sql = "UPDATE trash SET assetSize = ?, createDate = ?, mediaType = ? WHERE localIdentifier = ?;"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return false }
        sqlite3_bind_double(statement, 1, assetSize)
        sqlite3_bind_double(statement, 2, createDate.timeIntervalSince1970)
        sqlite3_bind_int(statement, 3, Int32(mediaType))
        sqlite3_bind_text(statement, 4, (localIdentifier as NSString).utf8String, -1, Self.transientDestructor)
        if sqlite3_step(statement) == SQLITE_DONE {
            print("成功更新数据")
            return true
        }
        print("更新数据失败")
        return false
    }

    /// Returns every stored record (empty array on failure).
    func queryAll() -> [(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int)] {
        let sql = "SELECT * FROM trash;"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        var rows: [(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int)] = []
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return rows }
        while sqlite3_step(statement) == SQLITE_ROW {
            // BUG FIX: sqlite3_column_text can return NULL; the original
            // force-unwrapped it via String(cString:). Skip such rows.
            guard let idText = sqlite3_column_text(statement, 0) else { continue }
            rows.append((
                String(cString: idText),
                sqlite3_column_double(statement, 1),
                Date(timeIntervalSince1970: sqlite3_column_double(statement, 2)),
                Int(sqlite3_column_int(statement, 3))
            ))
        }
        return rows
    }

    /// Returns the record with the given identifier, or nil when absent.
    func query(localIdentifier: String) -> (localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int)? {
        let sql = "SELECT * FROM trash WHERE localIdentifier = ?;"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return nil }
        sqlite3_bind_text(statement, 1, (localIdentifier as NSString).utf8String, -1, Self.transientDestructor)
        if sqlite3_step(statement) == SQLITE_ROW,
           let idText = sqlite3_column_text(statement, 0) {
            return (
                String(cString: idText),
                sqlite3_column_double(statement, 1),
                Date(timeIntervalSince1970: sqlite3_column_double(statement, 2)),
                Int(sqlite3_column_int(statement, 3))
            )
        }
        return nil
    }

    /// Returns all records with the given media type (1 = image, 2 = video per AssetModel).
    func queryByMediaType(_ mediaType: Int) -> [(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int)] {
        let sql = "SELECT * FROM trash WHERE mediaType = ?;"
        var statement: OpaquePointer?
        defer { sqlite3_finalize(statement) }
        var rows: [(localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int)] = []
        guard sqlite3_prepare_v2(db, sql, -1, &statement, nil) == SQLITE_OK else { return rows }
        sqlite3_bind_int(statement, 1, Int32(mediaType))
        while sqlite3_step(statement) == SQLITE_ROW {
            guard let idText = sqlite3_column_text(statement, 0) else { continue }
            rows.append((
                String(cString: idText),
                sqlite3_column_double(statement, 1),
                Date(timeIntervalSince1970: sqlite3_column_double(statement, 2)),
                Int(sqlite3_column_int(statement, 3))
            ))
        }
        return rows
    }

    deinit {
        sqlite3_close(db)
    }
}
//
// ActorManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import UIKit
// Actor serializing access to the similar-photo scan state and caches.
actor PhotoSimilarStateManager {
    // MARK: Stored state
    private var timeGroups: [TimeGroupModel] = []
    private var similarGroups: [SimilarGroupModel] = []
    private var pendingSimilarGroups: [SimilarGroupModel] = []
    private var processedGroupCount = 0
    private var assetsImageCache: [String: UIImage] = [:]
    private var hashCache: [String: String] = [:]

    // MARK: Time groups
    func appendTimeGroup(_ group: TimeGroupModel) {
        timeGroups.append(group)
    }
    func getAllTimeGroups() -> [TimeGroupModel] { timeGroups }

    // MARK: Similar groups
    /// Queues a newly found group and bumps the flush counter.
    func appendSimilarGroup(_ group: SimilarGroupModel) {
        pendingSimilarGroups.append(group)
        processedGroupCount += 1
    }
    func getpendingSimilarGroups() -> [SimilarGroupModel] { pendingSimilarGroups }
    func getAllSimilarGroups() -> [SimilarGroupModel] { similarGroups }
    /// True once ten or more groups have accumulated since the last flush.
    func shouldSavePendingGroups() -> Bool { processedGroupCount >= 10 }
    /// Moves pending groups into the confirmed list and resets the counter.
    func savePendingGroups() {
        similarGroups += pendingSimilarGroups
        pendingSimilarGroups = []
        processedGroupCount = 0
    }
    /// Replaces in-memory state with previously persisted data.
    func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
        self.timeGroups = timeGroups
        self.similarGroups = similarGroups
    }

    // MARK: Caches
    func getCachedImage(for identifier: String) -> UIImage? { assetsImageCache[identifier] }
    func setCachedImage(_ image: UIImage, for identifier: String) {
        assetsImageCache[identifier] = image
    }
    func getCachedHash(for identifier: String) async -> String? { hashCache[identifier] }
    func setCachedHash(_ hash: String, for identifier: String) async {
        hashCache[identifier] = hash
    }
}
// Actor serializing access to the similar-screenshot scan state and caches.
actor ScreenshotSimilarStateManager {
    // MARK: Stored state
    private var timeGroups: [TimeGroupModel] = []
    private var similarGroups: [SimilarGroupModel] = []
    private var pendingSimilarGroups: [SimilarGroupModel] = []
    private var processedGroupCount = 0
    private var assetsImageCache: [String: UIImage] = [:]
    private var hashCache: [String: String] = [:]

    // MARK: Time groups
    func appendTimeGroup(_ group: TimeGroupModel) {
        timeGroups.append(group)
    }
    func getAllTimeGroups() -> [TimeGroupModel] { timeGroups }

    // MARK: Similar groups
    /// Queues a newly found group and bumps the flush counter.
    func appendSimilarGroup(_ group: SimilarGroupModel) {
        pendingSimilarGroups.append(group)
        processedGroupCount += 1
    }
    func getpendingSimilarGroups() -> [SimilarGroupModel] { pendingSimilarGroups }
    func getAllSimilarGroups() -> [SimilarGroupModel] { similarGroups }
    /// True once ten or more groups have accumulated since the last flush.
    func shouldSavePendingGroups() -> Bool { processedGroupCount >= 10 }
    /// Moves pending groups into the confirmed list and resets the counter.
    func savePendingGroups() {
        similarGroups += pendingSimilarGroups
        pendingSimilarGroups = []
        processedGroupCount = 0
    }
    /// Replaces in-memory state with previously persisted data.
    func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
        self.timeGroups = timeGroups
        self.similarGroups = similarGroups
    }

    // MARK: Caches
    func getCachedImage(for identifier: String) -> UIImage? { assetsImageCache[identifier] }
    func setCachedImage(_ image: UIImage, for identifier: String) {
        assetsImageCache[identifier] = image
    }
    func getCachedHash(for identifier: String) async -> String? { hashCache[identifier] }
    func setCachedHash(_ hash: String, for identifier: String) async {
        hashCache[identifier] = hash
    }
}
// Actor serializing access to the similar-video scan state and caches.
actor VideoSimilarStateManager {
    // MARK: Stored state
    private var timeGroups: [TimeGroupModel] = []
    private var similarGroups: [SimilarGroupModel] = []
    private var pendingSimilarGroups: [SimilarGroupModel] = []
    private var processedGroupCount = 0
    private var assetsImageCache: [String: UIImage] = [:]
    private var hashCache: [String: String] = [:]

    // MARK: Time groups
    func appendTimeGroup(_ group: TimeGroupModel) {
        timeGroups.append(group)
    }
    func getAllTimeGroups() -> [TimeGroupModel] { timeGroups }

    // MARK: Similar groups
    /// Queues a newly found group and bumps the flush counter.
    func appendSimilarGroup(_ group: SimilarGroupModel) {
        pendingSimilarGroups.append(group)
        processedGroupCount += 1
    }
    func getpendingSimilarGroups() -> [SimilarGroupModel] { pendingSimilarGroups }
    func getAllSimilarGroups() -> [SimilarGroupModel] { similarGroups }
    /// True once ten or more groups have accumulated since the last flush.
    func shouldSavePendingGroups() -> Bool { processedGroupCount >= 10 }
    /// Moves pending groups into the confirmed list and resets the counter.
    func savePendingGroups() {
        similarGroups += pendingSimilarGroups
        pendingSimilarGroups = []
        processedGroupCount = 0
    }
    /// Replaces in-memory state with previously persisted data.
    func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
        self.timeGroups = timeGroups
        self.similarGroups = similarGroups
    }

    // MARK: Caches
    func getCachedImage(for identifier: String) -> UIImage? { assetsImageCache[identifier] }
    func setCachedImage(_ image: UIImage, for identifier: String) {
        assetsImageCache[identifier] = image
    }
    func getCachedHash(for identifier: String) async -> String? { hashCache[identifier] }
    func setCachedHash(_ hash: String, for identifier: String) async {
        hashCache[identifier] = hash
    }
}
// Actor serializing access to the duplicate-photo scan state and caches.
actor PhotoDuplicateStateManager {
    // MARK: Stored state
    private var duplicateGroups: [DuplicateGroupModel] = []
    private var pendingDuplicateGroups: [DuplicateGroupModel] = []
    private var imageCache: [String: UIImage] = [:]
    private var hashCache: [String: String] = [:]

    // MARK: - Groups
    /// Replaces in-memory groups with previously persisted data.
    func loadStoredData(duplicateGroups: [DuplicateGroupModel]) {
        self.duplicateGroups = duplicateGroups
    }
    func getAllDuplicateGroups() -> [DuplicateGroupModel] { duplicateGroups }
    /// Queues a newly found duplicate group for the next flush.
    func appendDuplicateGroup(_ group: DuplicateGroupModel) {
        pendingDuplicateGroups.append(group)
    }
    /// True once five or more groups are waiting to be flushed.
    func shouldSavePendingGroups() -> Bool { pendingDuplicateGroups.count >= 5 }
    /// Moves pending groups into the confirmed list.
    func savePendingGroups() {
        duplicateGroups += pendingDuplicateGroups
        pendingDuplicateGroups = []
    }
    func getPendingDuplicateGroups() -> [DuplicateGroupModel] { pendingDuplicateGroups }

    // MARK: - Caches
    func getCachedImage(for identifier: String) -> UIImage? { imageCache[identifier] }
    func setCachedImage(_ image: UIImage, for identifier: String) {
        imageCache[identifier] = image
    }
    func getCachedHash(for identifier: String) -> String? { hashCache[identifier] }
    func setCachedHash(_ hash: String, for identifier: String) {
        hashCache[identifier] = hash
    }
}
//
// PhotoDuplicateManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/8.
//
import Foundation
import Photos
import UIKit
@MainActor
class PhotoDuplicateManager: @unchecked Sendable {
    static let shared = PhotoDuplicateManager()
    // Actor that owns all mutable scan state (groups + caches).
    private let stateManager = PhotoDuplicateStateManager()
    private init() {}
    // MARK: - Configuration
    // 10-minute time window.
    // NOTE(review): not referenced anywhere in the visible code — confirm still needed.
    private let timeWindowInSeconds: TimeInterval = 600
    // Persisted-result file locations (Documents directory).
    private var timeGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("DuplicateTimeGroups.json").path
    }
    private var duplicateGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("DuplicateGroups.json").path
    }
    private var currentTask: Task<Void, Error>?
    // Storage key for the newest processed photo's timestamp.
    private let latestPhotoTimeKey = "DuplicateLatestPhotoTimestamp"
    /// Finds groups of exactly-identical assets among `assets`.
    ///
    /// Previously persisted groups are reported first through `progressHandler`;
    /// new candidates are pre-grouped by pixel resolution, then each group is
    /// hashed and compared concurrently (up to 4 batches in flight).
    /// - Parameters:
    ///   - assets: candidate assets to scan.
    ///   - mediaType: NOTE(review): accepted but never read by this method — confirm intended.
    ///   - progressHandler: invoked on the main actor once per duplicate group found.
    ///   - completionHandler: invoked on the main actor with every known group when done.
    func findDuplicateAssets(in assets: [PHAsset],
                             mediaType: MediaType = .photo,
                             progressHandler: (([AssetModel]) -> Void)?,
                             completionHandler: (([[AssetModel]]) -> Void)?) {
        Task {
            // 1. Load locally persisted results.
            await loadStoredData()
            print("本地数据加载完成")
            // 2. Report the already-cached groups immediately.
            let cachedGroups = await stateManager.getAllDuplicateGroups()
            print("通知已缓存的结果", cachedGroups.count)
            await MainActor.run {
                for group in cachedGroups {
                    progressHandler?(group.assets)
                }
            }
            // 3. Pre-group by resolution: only same-sized images can be exact duplicates.
            var resolutionGroups: [[PHAsset]] = []
            var tempGroups: [String: [PHAsset]] = [:] // scratch buckets keyed by "WxH"
            // First pass: bucket assets sharing a resolution.
            for asset in assets {
                let resolution = "\(asset.pixelWidth)x\(asset.pixelHeight)"
                if tempGroups[resolution] == nil {
                    tempGroups[resolution] = []
                }
                tempGroups[resolution]?.append(asset)
            }
            // Second pass: keep only buckets with more than one asset.
            resolutionGroups = tempGroups.values.filter { $0.count > 1 }
            // Nothing new to process — return the cached result as-is.
            if resolutionGroups.isEmpty {
                let total = cachedGroups.map { $0.assets }
                await MainActor.run {
                    completionHandler?(total)
                }
                return
            }
            let maxConcurrency = 4 // maximum concurrent hash batches
            let batchSize = max(1, resolutionGroups.count / maxConcurrency)
            for batchIndex in stride(from: 0, to: resolutionGroups.count, by: batchSize) {
                let endIndex = min(batchIndex + batchSize, resolutionGroups.count)
                let batch = Array(resolutionGroups[batchIndex..<endIndex])
                await withTaskGroup(of: Void.self) { group in
                    for assets in batch {
                        group.addTask { [weak self] in
                            guard let self = self else { return }
                            // 4a. Hash every image in this resolution bucket.
                            var hashGroups: [String: [PHAsset]] = [:]
                            for asset in assets {
                                if let hash = await self.getOrCalculateHash(for: asset) {
                                    if hashGroups[hash] == nil {
                                        hashGroups[hash] = []
                                    }
                                    hashGroups[hash]?.append(asset)
                                }
                            }
                            // 4b. Keep hash buckets that survive the strict exact-duplicate check.
                            let duplicateGroups = hashGroups.values.filter { group in
                                group.count > 1 && self.areAssetsExactlyDuplicate(group)
                            }
                            // 4c. Report and persist each confirmed duplicate group.
                            for duplicateGroup in duplicateGroups {
                                let groupId = UUID().uuidString
                                let assetModels = await self.createAssetModels(from: duplicateGroup)
                                // Progress callback on the main actor.
                                await MainActor.run {
                                    progressHandler?(assetModels)
                                }
                                // Queue the group; flush to disk when the actor's threshold is reached.
                                await self.stateManager.appendDuplicateGroup(
                                    DuplicateGroupModel(groupId: groupId, assets: assetModels)
                                )
                                if await self.stateManager.shouldSavePendingGroups() {
                                    await self.savePendingDuplicateGroups()
                                }
                            }
                        }
                    }
                }
            }
            // 5. Final flush and completion callback.
            if await !stateManager.getPendingDuplicateGroups().isEmpty {
                await self.savePendingDuplicateGroups()
            }
            let allGroups = await stateManager.getAllDuplicateGroups()
            await MainActor.run {
                print("执行完毕")
                completionHandler?(allGroups.map { $0.assets })
            }
        }
    }
    // MARK: - Helpers
    /// True when every asset matches the first one on resolution and byte size,
    /// and its creation time is NOT within 1 second of the first asset's.
    /// NOTE(review): the 1-second rule rejects burst shots, but it also rejects
    /// true duplicates that share an identical creation timestamp — confirm intended.
    nonisolated private func areAssetsExactlyDuplicate(_ assets: [PHAsset]) -> Bool {
        guard let firstAsset = assets.first else { return false }
        return assets.allSatisfy { asset in
            // Resolutions must match exactly.
            if asset.pixelWidth != firstAsset.pixelWidth ||
                asset.pixelHeight != firstAsset.pixelHeight {
                return false
            }
            // File sizes must match exactly.
            let firstSize = getAssetSize(firstAsset)
            let currentSize = getAssetSize(asset)
            if firstSize != currentSize {
                return false
            }
            // Creation times within 1s are treated as burst shots, not duplicates.
            if let time1 = asset.creationDate,
                let time2 = firstAsset.creationDate {
                let timeDiff = abs(time1.timeIntervalSince(time2))
                if timeDiff < 1.0 {
                    return false
                }
            }
            return true
        }
    }
    /// Byte size of an asset read via its first PHAssetResource.
    /// "fileSize" is an undocumented KVC key; 0 is returned when unavailable.
    nonisolated private func getAssetSize(_ asset: PHAsset) -> Int64 {
        if let resource = PHAssetResource.assetResources(for: asset).first,
            let size = resource.value(forKey: "fileSize") as? Int64 {
            return size
        }
        return 0
    }
    /// Converts assets to `AssetModel`s concurrently (mediaType fixed to 1 = image).
    /// NOTE(review): task-group completion order is nondeterministic, so the
    /// returned array's order may differ from `assets` — confirm callers don't rely on order.
    private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
        return await withTaskGroup(of: AssetModel.self) { modelGroup in
            var models: [AssetModel] = []
            for asset in assets {
                modelGroup.addTask {
                    let size = self.getAssetSize(asset)
                    return AssetModel(
                        localIdentifier: asset.localIdentifier,
                        assetSize: Double(size),
                        createDate: asset.creationDate ?? Date(),
                        mediaType: 1
                    )
                }
            }
            for await model in modelGroup {
                models.append(model)
            }
            return models
        }
    }
}
// MARK: - Persistence
extension PhotoDuplicateManager {
    /// Loads previously persisted duplicate groups from disk into the state manager.
    /// Missing or undecodable files silently yield an empty list.
    private func loadStoredData() async {
        let fileURL = URL(fileURLWithPath: duplicateGroupsPath)
        var stored: [DuplicateGroupModel] = []
        if let raw = try? Data(contentsOf: fileURL),
           let decoded = try? JSONDecoder().decode([DuplicateGroupModel].self, from: raw) {
            stored = decoded
        }
        await stateManager.loadStoredData(duplicateGroups: stored)
    }
    /// Flushes pending groups into the confirmed list, then persists the full
    /// set as JSON (write failures are ignored, best-effort).
    private func savePendingDuplicateGroups() async {
        await stateManager.savePendingGroups()
        let snapshot = await stateManager.getAllDuplicateGroups()
        if let encoded = try? JSONEncoder().encode(snapshot) {
            try? encoded.write(to: URL(fileURLWithPath: duplicateGroupsPath))
        }
    }
}
// MARK: - Hash computation
extension PhotoDuplicateManager {
    /// Returns a perceptual hash for `asset`, consulting the hash and image
    /// caches first and requesting a small thumbnail from Photos otherwise.
    /// - Returns: the hash string, or nil when no image could be loaded.
    private func getOrCalculateHash(for asset: PHAsset) async -> String? {
        if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
            return cachedHash
        }
        if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
            let hash = calculateImageHash(cachedImage)
            await stateManager.setCachedHash(hash, for: asset.localIdentifier)
            return hash
        }
        let options = PHImageRequestOptions()
        options.version = .original
        let targetSize = CGSize(width: 32, height: 32)
        return await withCheckedContinuation { continuation in
            PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
                if let image = image, let self = self {
                    let hash = self.calculateImageHash(image)
                    // Cache writes are fire-and-forget; the hash is returned immediately.
                    Task {
                        await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
                        await self.stateManager.setCachedHash(hash, for: asset.localIdentifier)
                    }
                    continuation.resume(returning: hash)
                } else {
                    continuation.resume(returning: nil)
                }
            }
        }
    }
    /// Computes a 1024-bit average hash ("aHash"): grayscale each pixel of a
    /// 32x32 buffer and emit "1"/"0" per pixel above/below the mean.
    /// - Returns: the hash string, or "" when any imaging step fails.
    private func calculateImageHash(_ image: UIImage) -> String {
        guard let cgImage = image.cgImage else { return "" }
        let ciImage = CIImage(cgImage: cgImage)
        guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
        // BUG FIX: the input image must be attached BEFORE reading
        // `filter.outputImage`. The original read `outputImage` inside the
        // creation guard — before setValue — so it was always nil, the guard
        // failed, and every image hashed to "" (making all images "duplicates"
        // of each other at the hash stage).
        filter.setValue(ciImage, forKey: kCIInputImageKey)
        guard let outputImage = filter.outputImage else { return "" }
        let context = CIContext()
        guard let renderedImage = context.createCGImage(outputImage, from: outputImage.extent),
              let pixelData = UIImage(cgImage: renderedImage).cgImage?.dataProvider?.data,
              let data = CFDataGetBytePtr(pixelData) else {
            return ""
        }
        // The loop below assumes a tightly packed 32x32 4-byte-per-pixel
        // buffer. The thumbnail request uses aspectFit, so the delivered image
        // may have other dimensions; bail out rather than read out of bounds.
        guard CFDataGetLength(pixelData) >= 32 * 32 * 4 else { return "" }
        var pixels = Array(repeating: UInt8(0), count: 1024)
        for i in 0..<32 {
            for j in 0..<32 {
                let pixelIndex = (i * 32 + j) * 4
                // ITU-R BT.601 luma weights.
                let gray = UInt8(
                    0.299 * Double(data[pixelIndex]) +
                    0.587 * Double(data[pixelIndex + 1]) +
                    0.114 * Double(data[pixelIndex + 2])
                )
                pixels[i * 32 + j] = gray
            }
        }
        let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
        return pixels.map { $0 > average ? "1" : "0" }.joined()
    }
}
//
// PhotoManager.swift
// PhotoManager
//
// Created by edy on 2025/4/22.
//
import Foundation
import Photos
import AVFoundation
import CoreML
import Vision
import UIKit
/// Central access point for the photo library: loads grouped assets,
/// converts them to `AssetModel`s, and publishes aggregate sizes.
class PhotoManager: ObservableObject {
    static let shared = PhotoManager()
    private init() {
        requestAuthorization()
    }
    // MARK: - Published state
    @Published private(set) var baseDataLoadingState: BaseDataLoadingState = .notLoaded
    /// Lifecycle of the initial library fetch.
    enum BaseDataLoadingState {
        case notLoaded
        case loading
        case loaded
        case failed(Error)
    }
    // MARK: - Assets
    // All fetched assets (populated by fetchTotalAssets — images only).
    var allAssets: [PHAsset] = []
    // Photos
    @Published private(set) var photosAssets: [PHAsset] = []
    // Screenshots
    @Published private(set) var screenShotAssets: [PHAsset] = []
    // Videos
    @Published private(set) var videoAssets: [PHAsset] = []
    // Non-screenshot images
    @Published private(set) var otherAssets: [PHAsset] = []
    // MARK: - Derived model groups
    // Similar photo groups
    var similarModels: [[AssetModel]] = []
    // Similar screenshot groups
    var similarScreenShotModels: [[AssetModel]] = []
    // Similar video groups
    var similarVideoModels: [[AssetModel]] = []
    // Exact-duplicate photo groups
    var duplicateModels: [[AssetModel]] = []
    var screenShotModels: [AssetModel] = []
    var videoModels: [AssetModel] = []
    var otherModels: [AssetModel] = []
    @Published private(set) var screenShotTotalSize: Int64 = 0
    @Published private(set) var videoTotalSize: Int64 = 0
    @Published private(set) var otherTotalSize: Int64 = 0
    private var currentPage: Int = 0
    private let pageSize: Int = 50 // items fetched per page
    // MARK: - Authorization
    /// Requests photo-library access; completion (on main) receives true only
    /// for full authorization.
    /// NOTE(review): `.limited` (iOS 14+) is reported as false — confirm intended.
    func requestAuthorization(completion: @escaping (Bool) -> Void) {
        PHPhotoLibrary.requestAuthorization { status in
            DispatchQueue.main.async {
                completion(status == .authorized)
            }
        }
    }
    private func requestAuthorization() {
        // Kick off the base fetch; the authorization flag is deliberately
        // ignored here (matches the original behavior).
        requestAuthorization { [weak self] _ in
            self?.getBaseAssetGroup()
        }
    }
    /// Fetches photos, videos and screenshots off the main thread, then
    /// publishes the grouped arrays and flips `baseDataLoadingState`.
    func getBaseAssetGroup() {
        baseDataLoadingState = .loading
        DispatchQueue.global(qos: .background).async {
            let fetchOptions = PHFetchOptions()
            fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
            let photoAllAssets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
            let videoAllAssets = PHAsset.fetchAssets(with: .video, options: fetchOptions)
            let fetchOptionsS = PHFetchOptions()
            fetchOptionsS.predicate = NSPredicate(format: "mediaSubtypes == %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
            let screenShotAllAssets = PHAsset.fetchAssets(with: .image, options: fetchOptionsS)
            let photoAssetsArray = photoAllAssets.objects(at: IndexSet(0..<photoAllAssets.count))
            let videoAssetsArray = videoAllAssets.objects(at: IndexSet(0..<videoAllAssets.count))
            let screenShotArray = screenShotAllAssets.objects(at: IndexSet(0..<screenShotAllAssets.count))
            // PERF: set membership instead of Array.contains — the original was
            // O(photos x screenshots), quadratic for large libraries.
            let screenShotSet = Set(screenShotArray)
            let otherArray = photoAssetsArray.filter { !screenShotSet.contains($0) }
            print("基本数据执行完毕")
            // Publish on the main thread.
            DispatchQueue.main.async {
                self.photosAssets = photoAssetsArray
                self.videoAssets = videoAssetsArray
                self.screenShotAssets = screenShotArray
                self.otherAssets = otherArray
                self.baseDataLoadingState = .loaded
            }
        }
    }
    /// Converts video assets to models and publishes their total size.
    /// (The parameter spelling `complectionHandler` is kept for source compatibility.)
    func convertVideoModels(complectionHandler: (([AssetModel], Int64) -> Void)?) {
        Task {
            let start = CFAbsoluteTimeGetCurrent()
            self.videoModels = await convertAssetsToModel(for: self.videoAssets, mediaType: 2)
            let duration = CFAbsoluteTimeGetCurrent() - start
            // BUG FIX: the original log said "其他图片" (other photos) here.
            print("视频转换总耗时: \(duration)秒")
            let videoTotalSize = Int64(self.videoModels.reduce(0) { $0 + $1.assetSize })
            await MainActor.run {
                self.videoTotalSize = videoTotalSize
                complectionHandler?(self.videoModels, videoTotalSize)
            }
        }
    }
    /// Converts non-screenshot image assets to models and publishes their total size.
    func convertOtherPhotoModels(complectionHandler: (([AssetModel], Int64) -> Void)?) {
        Task {
            let start = CFAbsoluteTimeGetCurrent()
            self.otherModels = await convertAssetsToModel(for: self.otherAssets, mediaType: 1)
            let duration = CFAbsoluteTimeGetCurrent() - start
            print("其他图片转换总耗时: \(duration)秒")
            let otherTotalSize = Int64(self.otherModels.reduce(0) { $0 + $1.assetSize })
            await MainActor.run {
                self.otherTotalSize = otherTotalSize
                complectionHandler?(self.otherModels, otherTotalSize)
            }
        }
    }
    /// Converts screenshot assets to models and publishes their total size.
    func convertScreenShotModels(complectionHandler: (([AssetModel], Int64) -> Void)?) {
        Task {
            let start = CFAbsoluteTimeGetCurrent()
            self.screenShotModels = await convertAssetsToModel(for: self.screenShotAssets, mediaType: 1)
            let duration = CFAbsoluteTimeGetCurrent() - start
            print("截图转换总耗时: \(duration)秒")
            let screenShotTotalSize = Int64(self.screenShotModels.reduce(0) { $0 + $1.assetSize })
            await MainActor.run {
                self.screenShotTotalSize = screenShotTotalSize
                // BUG FIX: the original passed `self.otherModels` to the handler.
                complectionHandler?(self.screenShotModels, screenShotTotalSize)
            }
        }
    }
}
// MARK: - Utility helpers
extension PhotoManager {
    /// Resolves the on-disk size (bytes) of an asset via its first resource;
    /// returns 0 when the size cannot be read.
    func getAssetSize(for asset: PHAsset) -> Int64 {
        guard let resource = PHAssetResource.assetResources(for: asset).first,
              let byteCount = resource.value(forKey: "fileSize") as? Int64 else {
            return 0
        }
        return byteCount
    }
    /// Reports the byte count of an image asset's original data.
    private func requestImageData(for asset: PHAsset, completion: @escaping (Int64) -> Void) {
        let requestOptions = PHImageRequestOptions()
        requestOptions.isSynchronous = true // complete before the call returns
        requestOptions.version = .original
        PHImageManager.default().requestImageDataAndOrientation(for: asset, options: requestOptions) { data, _, _, _ in
            completion(Int64(data?.count ?? 0))
        }
    }
    /// Reports the byte count of a video asset's backing file (0 on failure).
    private func getVideoAssetSize(_ asset: PHAsset, completion: @escaping (Int64) -> Void) {
        let requestOptions = PHVideoRequestOptions()
        // Original data, network access allowed for iCloud-only assets.
        requestOptions.version = .original
        requestOptions.isNetworkAccessAllowed = true
        PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { avAsset, _, _ in
            if let urlAsset = avAsset as? AVURLAsset,
               let byteCount = try? urlAsset.url.resourceValues(forKeys: [.fileSizeKey]).fileSize {
                completion(Int64(byteCount))
            } else {
                completion(0)
            }
        }
    }
    /// Loads a UIImage for the asset matching `localIdentifier`; completion
    /// runs on the main queue (nil when the asset is missing or loading fails).
    func getImage(localIdentifier: String, targetSize: CGSize = PHImageManagerMaximumSize, completion: @escaping (UIImage?) -> Void) {
        let matches = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = matches.firstObject else {
            completion(nil)
            return
        }
        let requestOptions = PHImageRequestOptions()
        requestOptions.version = .current
        requestOptions.deliveryMode = .highQualityFormat
        requestOptions.isNetworkAccessAllowed = true // allow iCloud download
        requestOptions.resizeMode = .exact
        PHImageManager.default().requestImage(
            for: asset,
            targetSize: targetSize,
            contentMode: .aspectFit,
            options: requestOptions
        ) { image, _ in
            DispatchQueue.main.async {
                completion(image)
            }
        }
    }
    /// Resolves the playable file URL of the video matching `localIdentifier`;
    /// completion runs on the main queue (nil when not found or not a video).
    func getVideoURL(localIdentifier: String, completion: @escaping (URL?) -> Void) {
        let matches = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = matches.firstObject, asset.mediaType == .video else {
            completion(nil)
            return
        }
        let requestOptions = PHVideoRequestOptions()
        requestOptions.version = .original
        requestOptions.deliveryMode = .highQualityFormat
        requestOptions.isNetworkAccessAllowed = true
        PHImageManager.default().requestAVAsset(forVideo: asset, options: requestOptions) { avAsset, _, _ in
            DispatchQueue.main.async {
                completion((avAsset as? AVURLAsset)?.url)
            }
        }
    }
    /// Formats a byte count as a human-readable file size (KB/MB/GB).
    func formatBytes(_ bytes: Int64) -> String {
        let byteFormatter = ByteCountFormatter()
        byteFormatter.allowedUnits = [.useKB, .useMB, .useGB]
        byteFormatter.countStyle = .file
        return byteFormatter.string(fromByteCount: bytes)
    }
}
// MARK: - Data fetching
extension PhotoManager {
    /// Fetches up to `pageSize * (page + 1)` newest images (cumulative paging).
    func fetchImages(page: Int, completion: @escaping ([PHAsset]) -> Void) {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchOptions.fetchLimit = pageSize * (page + 1) // grow the window each page
        let allImages = PHAsset.fetchAssets(with: .image, options: fetchOptions)
        var images = [PHAsset]()
        allImages.enumerateObjects { (asset, _, _) in
            images.append(asset)
        }
        completion(images)
    }
    /// Fetches up to `pageSize * (page + 1)` newest videos (cumulative paging).
    func fetchVideos(page: Int, completion: @escaping ([PHAsset]) -> Void) {
        let fetchOptions = PHFetchOptions()
        fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
        fetchOptions.fetchLimit = pageSize * (page + 1) // grow the window each page
        let allVideos = PHAsset.fetchAssets(with: .video, options: fetchOptions)
        var videos = [PHAsset]()
        allVideos.enumerateObjects { (asset, _, _) in
            videos.append(asset)
        }
        completion(videos)
    }
    /// Fetches assets of one `MediaType` on a background queue; completion runs on main.
    func fetchMediaAssets(type: MediaType, completion: @escaping ([PHAsset]) -> Void) {
        DispatchQueue.global(qos: .background).async {
            let fetchOptions = PHFetchOptions()
            fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
            var assets: PHFetchResult<PHAsset>
            switch type {
            case .video:
                assets = PHAsset.fetchAssets(with: .video, options: fetchOptions)
            case .screenshot:
                // BUG FIX: the PHAsset key path is "mediaSubtypes" (plural).
                // The original used "mediaSubtype", inconsistent with
                // getBaseAssetGroup's working predicate and not a PHAsset key.
                fetchOptions.predicate = NSPredicate(format: "mediaSubtypes == %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
                assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
            case .photo:
                assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
            case .other:
                // Images that are not screenshots.
                let predicates = [
                    NSPredicate(format: "mediaType == %d", PHAssetMediaType.image.rawValue),
                    // BUG FIX: "mediaSubtypes" (plural), as above.
                    NSPredicate(format: "mediaSubtypes != %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
                ]
                fetchOptions.predicate = NSCompoundPredicate(andPredicateWithSubpredicates: predicates)
                assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
            }
            var mediaAssets: [PHAsset] = []
            assets.enumerateObjects { (asset, _, _) in
                mediaAssets.append(asset)
            }
            DispatchQueue.main.async {
                completion(mediaAssets)
            }
        }
    }
    /// Fetches every image asset and caches the result in `allAssets`.
    /// NOTE(review): despite the name, only `.image` assets are fetched —
    /// videos are excluded; confirm intended.
    func fetchTotalAssets(completion: @escaping ([PHAsset]) -> Void) {
        DispatchQueue.global(qos: .background).async {
            let fetchOptions = PHFetchOptions()
            fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
            let allAssets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
            var assets = [PHAsset]()
            allAssets.enumerateObjects { (asset, _, _) in
                assets.append(asset)
            }
            DispatchQueue.main.async {
                self.allAssets = assets
                completion(assets)
            }
        }
    }
}
extension PhotoManager {
    /// Converts `assets` to `AssetModel`s with at most four size lookups in
    /// flight at once; `mediaType` is stamped onto every model (1 image, 2 video).
    func convertAssetsToModel(for assets: [PHAsset], mediaType: Int) async -> [AssetModel] {
        let concurrencyLimit = 4 // bound on simultaneous lookups
        var models: [AssetModel] = []
        models.reserveCapacity(assets.count)
        var cursor = 0
        while cursor < assets.count {
            let upperBound = min(cursor + concurrencyLimit, assets.count)
            let slice = Array(assets[cursor..<upperBound])
            await withTaskGroup(of: AssetModel.self) { group in
                for asset in slice {
                    group.addTask {
                        AssetModel(
                            localIdentifier: asset.localIdentifier,
                            assetSize: Double(self.getAssetSize(for: asset)),
                            createDate: asset.creationDate ?? Date(),
                            mediaType: mediaType
                        )
                    }
                }
                // Drain this batch before starting the next.
                for await model in group {
                    models.append(model)
                }
            }
            cursor = upperBound
        }
        return models
    }
    /// Converts screenshots, other images and videos to models, then publishes
    /// the three aggregate sizes on the main actor.
    func getModelsData() {
        Task {
            let start = CFAbsoluteTimeGetCurrent()
            self.screenShotModels = await convertAssetsToModel(for: self.screenShotAssets, mediaType: 1)
            self.otherModels = await convertAssetsToModel(for: self.otherAssets, mediaType: 1)
            self.videoModels = await convertAssetsToModel(for: self.videoAssets, mediaType: 2)
            let duration = CFAbsoluteTimeGetCurrent() - start
            print("转换总耗时: \(duration)秒")
            let screenShotBytes = Int64(self.screenShotModels.reduce(0) { $0 + $1.assetSize })
            let videoBytes = Int64(self.videoModels.reduce(0) { $0 + $1.assetSize })
            let otherBytes = Int64(self.otherModels.reduce(0) { $0 + $1.assetSize })
            await MainActor.run {
                self.screenShotTotalSize = screenShotBytes
                self.videoTotalSize = videoBytes
                self.otherTotalSize = otherBytes
            }
        }
    }
}
//
// AssetModel.swift
// CleanPhoto
//
// Created by edy on 2025/5/7.
//
import Foundation
/// Lightweight, persistable snapshot of a photo-library asset.
/// Equality and hashing consider every stored field.
struct AssetModel :Codable,Hashable {
    /// `PHAsset.localIdentifier` of the underlying asset.
    var localIdentifier : String
    /// Asset size in bytes.
    var assetSize : Double
    /// Creation date of the asset.
    var createDate : Date
    /// 1 = image, 2 = video.
    var mediaType:Int
    init(localIdentifier: String, assetSize: Double, createDate: Date,mediaType:Int = 1) {
        self.localIdentifier = localIdentifier
        self.assetSize = assetSize
        self.createDate = createDate
        self.mediaType = mediaType
    }
    func hash(into hasher: inout Hasher) {
        hasher.combine(localIdentifier)
        hasher.combine(assetSize)
        hasher.combine(createDate)
        hasher.combine(mediaType)
    }
    static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool {
        (lhs.localIdentifier, lhs.assetSize, lhs.createDate, lhs.mediaType)
            == (rhs.localIdentifier, rhs.assetSize, rhs.createDate, rhs.mediaType)
    }
}
/// Container persisted to disk grouping converted asset models by category.
/// Property order must not change: callers may rely on the synthesized
/// memberwise initializer.
struct AssetFileModel:Codable{
    // Video assets.
    var videoAssets:[AssetModel] = []
    // "Other" photos — presumably everything outside the other buckets; verify against writer.
    var otherAssets:[AssetModel] = []
    // Screenshot assets.
    var screenShotAssets:[AssetModel] = []
    // Plain photo assets.
    var photosAssets:[AssetModel] = []
}
// Media category used when filtering and persisting assets.
enum MediaType {
    case video
    case screenshot
    case photo
    case other
}
extension MediaType{
    /// Stable string value used when persisting the media type (e.g. to the database).
    var dbValue:String{
        switch self {
        case .video: return "video"
        case .screenshot: return "screenshot"
        case .photo: return "photo"
        case .other: return "other"
        }
    }
}
// Time-window group: a contiguous creation-time range of assets.
struct TimeGroupModel: Codable {
    // Identifier of the group (the scanner builds it as "start_end" seconds).
    let groupId: String
    // Range bounds as Unix timestamps.
    let startTime: TimeInterval
    let endTime: TimeInterval
    // Whether the similarity scan has already covered this range.
    var isProcessed: Bool
}
// A group of mutually similar images.
struct SimilarGroupModel: Codable {
    let groupId: String
    // Mutable so stale (deleted-from-library) assets can be pruned on load.
    var assets: [AssetModel]
}
// A group of exact-duplicate images.
struct DuplicateGroupModel: Codable {
    let groupId: String
    let assets: [AssetModel]
}
//
// PhotoSimilarManager.swift
// PhotoSimilar
//
// Created by edy on 2025/5/8.
//
import Foundation
import Photos
import UIKit
@MainActor
class PhotoSimilarManager: @unchecked Sendable {
    // Shared singleton; all similarity scanning goes through this instance.
    static let shared = PhotoSimilarManager()
    // Holds time groups, similar groups and the image/hash caches for the scan.
    private let stateManager = PhotoSimilarStateManager()
    private init() {}
    // MARK: - Configuration
    private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
    private let fileSizeThreshold: Double = 0.05 // file-size difference threshold (5%)
    private let resolutionThreshold: Double = 0.05 // resolution difference threshold (5%)
    // private let hashDistanceThreshold: Int = 10 // pHash Hamming-distance threshold
    // Persistence locations (Documents directory).
    private var timeGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("TimeGroups.json").path
    }
    private var similarGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("SimilarGroups.json").path
    }
    // NOTE(review): never assigned anywhere in this class, so
    // cancelCurrentOperation() currently has nothing to cancel — confirm whether
    // the Task created in findSimilarAssets should be stored here.
    private var currentTask: Task<Void, Error>?
    // UserDefaults key remembering the newest asset timestamp already scanned.
    private let latestPhotoTimeKey = "LatestPhotoTimestamp"
    /// Scans `assets` for groups of visually similar photos.
    ///
    /// Cached groups are reported first through `progressHandler` (one call per
    /// group); newly discovered groups follow as they are found. When the scan
    /// finishes, `completionHandler` receives every group.
    /// - Parameters:
    ///   - assets: Assets to scan. The code reads `assets.first` as the newest
    ///     asset, so the list is assumed sorted newest-first — TODO confirm with callers.
    ///   - mediaType: NOTE(review): unused in this body — verify whether it was
    ///     meant to influence the created models (they are hard-coded to mediaType 1).
    ///   - progressHandler: Invoked on the main actor with each similar group.
    ///   - completionHandler: Invoked on the main actor with all groups.
    func findSimilarAssets(in assets: [PHAsset],
                           mediaType: MediaType = .photo,
                           progressHandler: (([AssetModel]) -> Void)?,
                           completionHandler: (([[AssetModel]]) -> Void)?) {
        Task {
            // 1. Load persisted time groups / similar groups from disk.
            await loadStoredData()
            print("本地数据加载完成")
            // 2. Read the newest asset timestamp recorded by the previous run.
            var lastLatestTime = UserDefaults.standard.double(forKey: latestPhotoTimeKey)
            if lastLatestTime == 0 {
                // First run: fall back to the first (newest) asset's timestamp.
                lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
            }
            // 3. Report already-cached groups immediately.
            let cachedGroups = await stateManager.getAllSimilarGroups()
            print("通知已缓存的结果", cachedGroups.count)
            await MainActor.run {
                for group in cachedGroups {
                    progressHandler?(group.assets)
                }
            }
            // 4. Partition by time:
            // assets newer than the last recorded timestamp...
            let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
            // ...and everything at or before it.
            let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
            // Remember the newest timestamp for the next run.
            if let latestAsset = assets.first {
                let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
                UserDefaults.standard.set(latestTime, forKey: latestPhotoTimeKey)
                UserDefaults.standard.synchronize()
            }
            let newtimeGroup = groupAssetsByTimeWindow(newAssets)
            let oldGroups = groupAssetsByTimeWindow(oldAssets)
            let timeGroups = newtimeGroup + oldGroups
            var unprocessedGroups: [[PHAsset]] = []
            // Time groups handled by earlier runs.
            let processedTimeGroups = await stateManager.getAllTimeGroups()
            // 5. Keep only time groups not already covered by a processed range.
            for group in timeGroups {
                if let firstAsset = group.first,
                   let lastAsset = group.last,
                   let firstDate = firstAsset.creationDate,
                   let lastDate = lastAsset.creationDate {
                    // NOTE(review): groups are newest-first, so firstDate >= lastDate;
                    // the check compares startTime against the newer bound and endTime
                    // against the older — verify the intended containment logic.
                    let isProcessed = processedTimeGroups.contains { timeGroup in
                        return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
                        timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
                        timeGroup.isProcessed
                    }
                    if !isProcessed {
                        unprocessedGroups.append(group)
                    }
                }
            }
            print("开始处理分组,分组资源为:",unprocessedGroups.count)
            let maxConcurrency = 6 // upper bound on concurrent batches
            let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
            if unprocessedGroups.count == 0 {
                // Nothing new to scan: finish with the cached groups only.
                let total = cachedGroups.compactMap{$0.assets}
                completionHandler?(total)
                return
            }
            // Process the remaining time groups in concurrent batches.
            for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
                let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
                await withTaskGroup(of: Void.self) { group in
                    for unGroup in batch {
                        group.addTask { [weak self] in
                            guard let self = self else {
                                print("self 为 nil,任务提前退出")
                                return
                            }
                            // 6.1 Pre-group by size (pixel area — see isFileSizeSimilar).
                            let sizeGroups = self.groupAssetsBySize(unGroup)
                            // 6.2 Cluster each size group.
                            for sizeGroup in sizeGroups {
                                let similarGroups = await self.findSimilarInGroupUsingKMeans(sizeGroup)
                                if !similarGroups.isEmpty {
                                    for similarGroup in similarGroups {
                                        let groupId = UUID().uuidString
                                        // 6.3 Build model values for the group.
                                        let assetModels = await createAssetModels(from: similarGroup)
                                        // 6.4 Report progress.
                                        await MainActor.run {
                                            progressHandler?(assetModels)
                                        }
                                        // 6.5 Record the group; flush to disk periodically.
                                        await stateManager.appendSimilarGroup(SimilarGroupModel(groupId: groupId, assets: assetModels))
                                        if await stateManager.shouldSavePendingGroups() {
                                            await savePendingSimilarGroups()
                                        }
                                    }
                                }
                            }
                            // 6.6 Mark this time group as processed.
                            if let firstDate = unGroup.first?.creationDate,
                               let lastDate = unGroup.last?.creationDate {
                                let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
                                let timeGroup = TimeGroupModel(
                                    groupId: groupId,
                                    startTime: firstDate.timeIntervalSince1970,
                                    endTime: lastDate.timeIntervalSince1970,
                                    isProcessed: true
                                )
                                await self.saveTimeGroup(timeGroup)
                            }
                        }
                    }
                }
            }
            // 7. Flush any still-pending groups and deliver the final result.
            if await !stateManager.getpendingSimilarGroups().isEmpty {
                await savePendingSimilarGroups()
            }
            let allGroups = await stateManager.getAllSimilarGroups()
            await MainActor.run {
                print("执行完毕")
                completionHandler?(allGroups.map { $0.assets })
            }
        }
    }
    /// Builds `AssetModel` values for `assets` concurrently (completion order).
    /// The byte size is read from the first PHAssetResource via the KVC key
    /// "fileSize" — NOTE(review): that key is not public API; 0 is used when absent.
    private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
        return await withTaskGroup(of: AssetModel.self) { modelGroup in
            var models: [AssetModel] = []
            for asset in assets {
                modelGroup.addTask {
                    return await withCheckedContinuation { continuation in
                        let assetSize: Double
                        if let resource = PHAssetResource.assetResources(for: asset).first,
                           let size = resource.value(forKey: "fileSize") as? Int64 {
                            assetSize = Double(size)
                        } else {
                            assetSize = 0
                        }
                        let model = AssetModel(
                            localIdentifier: asset.localIdentifier,
                            assetSize: assetSize,
                            createDate: asset.creationDate ?? Date(),
                            mediaType: 1
                        )
                        continuation.resume(returning: model)
                    }
                }
            }
            for await model in modelGroup {
                models.append(model)
            }
            return models
        }
    }
}
// MARK: - Grouping helpers
extension PhotoSimilarManager{
    /// Splits `assets` into groups whose creation times fall inside a 10-minute
    /// window anchored at the newest member; single-member groups are dropped.
    /// Assets without a creation date are treated as "now".
    nonisolated private func groupAssetsByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
        // Newest first.
        let ordered = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
        guard let newest = ordered.first else { return [] }
        var groups: [[PHAsset]] = []
        var bucket: [PHAsset] = []
        var anchorTime = newest.creationDate ?? Date()
        for asset in ordered {
            let assetTime = asset.creationDate ?? Date()
            // Descending order: the gap from the window anchor is anchor - current.
            if anchorTime.timeIntervalSince(assetTime) > timeWindowInSeconds {
                // Window exceeded: flush the bucket (singletons dropped) and re-anchor.
                if bucket.count > 1 {
                    groups.append(bucket)
                }
                bucket = []
                anchorTime = assetTime
            }
            bucket.append(asset)
        }
        // Flush the trailing bucket.
        if bucket.count > 1 {
            groups.append(bucket)
        }
        return groups
    }
    /// Greedy partition of `assets` into buckets of similar pixel area;
    /// buckets with fewer than two members are discarded.
    nonisolated private func groupAssetsBySize(_ assets: [PHAsset]) -> [[PHAsset]] {
        var groups: [[PHAsset]] = []
        var claimed = Set<String>()
        for anchor in assets where !claimed.contains(anchor.localIdentifier) {
            claimed.insert(anchor.localIdentifier)
            var bucket = [anchor]
            for candidate in assets where !claimed.contains(candidate.localIdentifier) {
                if isFileSizeSimilar(anchor, candidate) {
                    bucket.append(candidate)
                    claimed.insert(candidate.localIdentifier)
                }
            }
            if bucket.count > 1 {
                groups.append(bucket)
            }
        }
        return groups
    }
    /// True when the two assets' pixel areas differ by at most `fileSizeThreshold`.
    /// NOTE(review): despite the name this compares pixel area (width * height),
    /// not on-disk file size — confirm intended.
    nonisolated private func isFileSizeSimilar(_ asset1: PHAsset, _ asset2: PHAsset) -> Bool {
        let area1 = Double(asset1.pixelWidth * asset1.pixelHeight)
        let area2 = Double(asset2.pixelWidth * asset2.pixelHeight)
        let relativeDiff = abs(area1 - area2) / max(area1, area2)
        return relativeDiff <= fileSizeThreshold
    }
}
// MARK: - Persistence
extension PhotoSimilarManager{
    /// Loads previously saved time groups and similar groups into the state manager.
    /// Missing or undecodable files silently fall back to empty lists.
    private func loadStoredData() async {
        let decoder = JSONDecoder()
        var timeGroups: [TimeGroupModel] = []
        var similarGroups: [SimilarGroupModel] = []
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
           let decoded = try? decoder.decode([TimeGroupModel].self, from: raw) {
            timeGroups = decoded
        }
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
           let decoded = try? decoder.decode([SimilarGroupModel].self, from: raw) {
            similarGroups = decoded
        }
        await stateManager.loadStoredData(timeGroups: timeGroups, similarGroups: similarGroups)
    }
    /// Appends a processed time group and persists the full list to disk.
    private func saveTimeGroup(_ group: TimeGroupModel) async {
        await stateManager.appendTimeGroup(group)
        let allGroups = await stateManager.getAllTimeGroups()
        if let encoded = try? JSONEncoder().encode(allGroups) {
            try? encoded.write(to: URL(fileURLWithPath: timeGroupsPath))
        }
    }
    /// Flushes pending similar groups into the state manager and persists all groups.
    private func savePendingSimilarGroups() async {
        await stateManager.savePendingGroups()
        let allGroups = await stateManager.getAllSimilarGroups()
        if let encoded = try? JSONEncoder().encode(allGroups) {
            try? encoded.write(to: URL(fileURLWithPath: similarGroupsPath))
        }
    }
    /// Returns stored similar groups, dropping assets that no longer exist in
    /// the photo library and groups that end up empty.
    private func loadSimilarGroups() async -> [SimilarGroupModel] {
        let stored = await stateManager.getAllSimilarGroups()
        let validated: [SimilarGroupModel] = stored.map { group in
            let survivors = group.assets.filter { asset in
                PHAsset.fetchAssets(withLocalIdentifiers: [asset.localIdentifier], options: nil).firstObject != nil
            }
            return SimilarGroupModel(groupId: group.groupId, assets: survivors)
        }
        return validated.filter { !$0.assets.isEmpty }
    }
}
// MARK: - pHash
extension PhotoSimilarManager{
    /// Returns a perceptual hash for `asset`, using the hash cache, then the
    /// thumbnail cache, and finally requesting a 32x32 thumbnail.
    /// Returns nil when the image request yields no image.
    private func getOrCalculateHash(for asset: PHAsset) async -> String? {
        if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
            print("返回缓存cachedHash")
            return cachedHash
        }
        if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
            let hash = calculateImageHash(cachedImage)
            print("返回缓存hash")
            await stateManager.setCachedHash(hash, for: asset.localIdentifier)
            return hash
        }
        let options = PHImageRequestOptions()
        options.version = .original
        // Fix: with the default .opportunistic delivery an asynchronous request
        // may call its handler more than once (degraded image first), which would
        // resume the checked continuation twice and crash. .highQualityFormat
        // guarantees a single callback.
        options.deliveryMode = .highQualityFormat
        let targetSize = CGSize(width: 32, height: 32)
        return await withCheckedContinuation { continuation in
            PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
                if let image = image, let self = self {
                    let tempHash = self.calculateImageHash(image)
                    // Cache asynchronously; the hash is returned immediately.
                    Task {
                        await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
                        await self.stateManager.setCachedHash(tempHash, for: asset.localIdentifier)
                    }
                    continuation.resume(returning: tempHash)
                } else {
                    continuation.resume(returning: nil)
                }
            }
        }
    }
    /// Computes a 1024-bit average-hash string ("0"/"1" per sampled pixel):
    /// grayscale-filter the image, sample a 32x32 grid, threshold each sample
    /// against the mean. Returns "" when any rendering step fails or the bitmap
    /// is smaller than the sample grid.
    private func calculateImageHash(_ image: UIImage) -> String {
        guard let cgImage = image.cgImage else { return "" }
        let ciImage = CIImage(cgImage: cgImage)
        // Fix: the input must be attached BEFORE reading outputImage. The
        // original read filter.outputImage in the guard and only then called
        // setValue, so outputImage was always nil and every hash came back "".
        guard let filter = CIFilter(name: "CIPhotoEffectNoir") else {
            return ""
        }
        filter.setValue(ciImage, forKey: kCIInputImageKey)
        guard let outputImage = filter.outputImage else {
            return ""
        }
        let context = CIContext()
        guard let renderedImage = context.createCGImage(outputImage, from: outputImage.extent),
              let pixelData = renderedImage.dataProvider?.data,
              let data = CFDataGetBytePtr(pixelData) else {
            return ""
        }
        // Fix: index with the bitmap's real row stride and pixel size, and
        // refuse buffers smaller than the 32x32 grid (the original assumed a
        // tightly packed 32x32 RGBA buffer and could read out of bounds).
        let bytesPerPixel = max(1, renderedImage.bitsPerPixel / 8)
        let bytesPerRow = renderedImage.bytesPerRow
        guard renderedImage.width >= 32, renderedImage.height >= 32, bytesPerPixel >= 3 else {
            return ""
        }
        var pixels = Array(repeating: UInt8(0), count: 1024)
        for row in 0..<32 {
            for col in 0..<32 {
                let offset = row * bytesPerRow + col * bytesPerPixel
                // ITU-R BT.601 luma weights; the sum cannot exceed 255.
                let gray = UInt8(
                    0.299 * Double(data[offset]) +
                    0.587 * Double(data[offset + 1]) +
                    0.114 * Double(data[offset + 2])
                )
                pixels[row * 32 + col] = gray
            }
        }
        // Threshold each sample against the mean to obtain the hash bits.
        let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
        return pixels.map { $0 > average ? "1" : "0" }.joined()
    }
    /// Number of differing positions between two equal-length hash strings;
    /// Int.max when the lengths differ.
    private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
        guard hash1.count == hash2.count else { return Int.max }
        return zip(hash1, hash2).filter { $0 != $1 }.count
    }
    /// Cancels the in-flight scan, if any (see NOTE on `currentTask`: it is
    /// never assigned, so this is currently a no-op).
    func cancelCurrentOperation() {
        currentTask?.cancel()
    }
}
// MARK: - Similarity clustering
extension PhotoSimilarManager{
    /// K-Means clustering over binary-hash vectors.
    /// - Parameters:
    ///   - data: points of equal dimension.
    ///   - k: number of clusters; must satisfy 1 <= k <= data.count.
    ///   - maxIterations: upper bound on refinement iterations.
    /// - Returns: one cluster label (0..<k) per point; [] for invalid input.
    ///
    /// NOTE(review): initial centroids are drawn with randomElement() and may
    /// repeat; points farther than 0.3 from every centroid keep the default
    /// label 0 and are accumulated into cluster 0 — confirm both are intended.
    func kMeansClustering(data: [[Double]], k: Int, maxIterations: Int = 100) -> [Int] {
        guard !data.isEmpty, k > 0, k <= data.count else {
            return []
        }
        var centroids = (0..<k).map { _ in data.randomElement()! }
        var labels = Array(repeating: 0, count: data.count)
        for _ in 0..<maxIterations {
            var newCentroids = Array(repeating: Array(repeating: 0.0, count: data[0].count), count: k)
            var clusterCounts = Array(repeating: 0, count: k)
            // Assign each point to its nearest centroid within the 0.3 cutoff.
            for (i, point) in data.enumerated() {
                var minDistance = Double.infinity
                var closestCentroidIndex = 0
                for (j, centroid) in centroids.enumerated() {
                    let distance = euclideanDistance(point, centroid)
                    if distance < minDistance && distance < 0.3 {
                        minDistance = distance
                        closestCentroidIndex = j
                    }
                }
                labels[i] = closestCentroidIndex
                newCentroids[closestCentroidIndex] = newCentroids[closestCentroidIndex].enumerated().map { index, value in
                    value + point[index]
                }
                clusterCounts[closestCentroidIndex] += 1
            }
            // Recompute centroids; stop early once all are stable.
            var hasChanged = false
            for i in 0..<k {
                if clusterCounts[i] > 0 {
                    let newCentroid = newCentroids[i].enumerated().map { index, value in
                        value / Double(clusterCounts[i])
                    }
                    if newCentroid != centroids[i] {
                        hasChanged = true
                        centroids[i] = newCentroid
                    }
                }
            }
            if !hasChanged {
                break
            }
        }
        return labels
    }
    /// Euclidean distance between two equal-length vectors.
    func euclideanDistance(_ point1: [Double], _ point2: [Double]) -> Double {
        let squaredSum = zip(point1, point2).map { pow($0 - $1, 2) }.reduce(0, +)
        return sqrt(squaredSum)
    }
    /// Maps a binary hash string ("1"/"0" characters) to a numeric vector.
    func hashToVector(_ hash: String) -> [Double] {
        return hash.map { $0 == "1" ? 1.0 : 0.0 }
    }
    /// Clusters a candidate group into sub-groups of visually similar assets.
    ///
    /// Fix: hashes are now collected together with their source index. The
    /// original gathered task-group results in completion order (which is
    /// arbitrary) and then indexed `assets[i]` by the position in that unordered
    /// list — after `compactMap` had additionally dropped nil hashes and shifted
    /// the indices — so assets were paired with other assets' hash vectors.
    private func findSimilarInGroupUsingKMeans(_ assets: [PHAsset]) async -> [[PHAsset]] {
        // Compute each asset's hash, keeping the original index with the result.
        let indexedHashes = await withTaskGroup(of: (Int, String?).self) { group in
            for (index, asset) in assets.enumerated() {
                group.addTask {
                    return (index, await self.getOrCalculateHash(for: asset))
                }
            }
            var slots = Array(repeating: String?.none, count: assets.count)
            for await (index, hash) in group {
                slots[index] = hash
            }
            return slots
        }
        // Keep only assets whose hash succeeded, preserving the asset/vector pairing.
        var candidateAssets: [PHAsset] = []
        var vectors: [[Double]] = []
        for (index, hash) in indexedHashes.enumerated() {
            if let hash = hash {
                candidateAssets.append(assets[index])
                vectors.append(hashToVector(hash))
            }
        }
        guard !vectors.isEmpty else { return [] }
        // Cluster with K-Means (at most 10 clusters); k is based on the
        // surviving vectors so the k <= data.count precondition always holds.
        let k = min(candidateAssets.count, 10)
        let labels = kMeansClustering(data: vectors, k: k)
        guard labels.count == candidateAssets.count else { return [] }
        var clusters: [[PHAsset]] = Array(repeating: [], count: k)
        for (i, label) in labels.enumerated() {
            clusters[label].append(candidateAssets[i])
        }
        // Only clusters with at least two members count as similar groups.
        return clusters.filter { $0.count > 1 }
    }
}
// MARK: - Deprecated
// This extension previously carried only commented-out legacy implementations:
// a brute-force pairwise similarity scan (findSimilarInGroup / areAssetsSimilar /
// isResolutionSimilar / compareAssetHashes using pHash Hamming distance), an
// earlier getOrCalculateHash without hash caching, and a createAssetModels that
// sized assets via requestImageDataAndOrientation. All were superseded by the
// K-Means pipeline above; the dead comment blocks were condensed during review —
// recover the originals from version control if ever needed.
extension PhotoSimilarManager{
}
//
// ScreenShotSimilarManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import Photos
import UIKit
@MainActor
class ScreenshotSimilarJSONManager: @unchecked Sendable {
    // NOTE(review): this class is a near-verbatim duplicate of
    // PhotoSimilarManager (only thresholds, storage paths, the UserDefaults key
    // and maxConcurrency differ) — consider extracting a shared implementation.
    static let shared = ScreenshotSimilarJSONManager()
    // Holds time groups, similar groups and the image/hash caches for the scan.
    private let stateManager = PhotoSimilarStateManager()
    private init() {}
    // MARK: - Configuration
    private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
    private let fileSizeThreshold: Double = 0.01 // file-size difference threshold (1%)
    private let resolutionThreshold: Double = 0.01 // resolution difference threshold (1%)
    // Persistence locations (Documents directory).
    private var timeGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("screenshotTimeGroups.json").path
    }
    private var similarGroupsPath: String {
        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
        return paths[0].appendingPathComponent("screenshotSimilarGroups.json").path
    }
    // NOTE(review): never assigned anywhere in this class, so
    // cancelCurrentOperation() currently has nothing to cancel — confirm whether
    // the Task created in findSimilarAssets should be stored here.
    private var currentTask: Task<Void, Error>?
    // UserDefaults key remembering the newest asset timestamp already scanned.
    private let latestPhotoTimeKey = "screenshotLatestPhotoTimestamp"
    /// Scans `assets` (screenshots) for groups of visually similar images.
    ///
    /// Cached groups are reported first through `progressHandler` (one call per
    /// group); newly discovered groups follow as they are found. When the scan
    /// finishes, `completionHandler` receives every group.
    /// - Parameters:
    ///   - assets: Assets to scan. The code reads `assets.first` as the newest
    ///     asset, so the list is assumed sorted newest-first — TODO confirm with callers.
    ///   - mediaType: NOTE(review): unused in this body — verify whether it was
    ///     meant to influence the created models (they are hard-coded to mediaType 1).
    ///   - progressHandler: Invoked on the main actor with each similar group.
    ///   - completionHandler: Invoked on the main actor with all groups.
    func findSimilarAssets(in assets: [PHAsset],
                           mediaType: MediaType = .photo,
                           progressHandler: (([AssetModel]) -> Void)?,
                           completionHandler: (([[AssetModel]]) -> Void)?) {
        Task {
            // 1. Load persisted time groups / similar groups from disk.
            await loadStoredData()
            print("本地数据加载完成")
            // 2. Read the newest asset timestamp recorded by the previous run.
            var lastLatestTime = UserDefaults.standard.double(forKey: latestPhotoTimeKey)
            if lastLatestTime == 0 {
                // First run: fall back to the first (newest) asset's timestamp.
                lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
            }
            // 3. Report already-cached groups immediately.
            let cachedGroups = await stateManager.getAllSimilarGroups()
            print("通知已缓存的结果", cachedGroups.count)
            await MainActor.run {
                for group in cachedGroups {
                    progressHandler?(group.assets)
                }
            }
            // 4. Partition by time:
            // assets newer than the last recorded timestamp...
            let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
            // ...and everything at or before it.
            let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
            // Remember the newest timestamp for the next run.
            if let latestAsset = assets.first {
                let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
                UserDefaults.standard.set(latestTime, forKey: latestPhotoTimeKey)
                UserDefaults.standard.synchronize()
            }
            let newtimeGroup = groupAssetsByTimeWindow(newAssets)
            let oldGroups = groupAssetsByTimeWindow(oldAssets)
            let timeGroups = newtimeGroup + oldGroups
            var unprocessedGroups: [[PHAsset]] = []
            // Time groups handled by earlier runs.
            let processedTimeGroups = await stateManager.getAllTimeGroups()
            // 5. Keep only time groups not already covered by a processed range.
            for group in timeGroups {
                if let firstAsset = group.first,
                   let lastAsset = group.last,
                   let firstDate = firstAsset.creationDate,
                   let lastDate = lastAsset.creationDate {
                    // NOTE(review): groups are newest-first, so firstDate >= lastDate;
                    // the check compares startTime against the newer bound and endTime
                    // against the older — verify the intended containment logic.
                    let isProcessed = processedTimeGroups.contains { timeGroup in
                        return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
                        timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
                        timeGroup.isProcessed
                    }
                    if !isProcessed {
                        unprocessedGroups.append(group)
                    }
                }
            }
            print("开始处理分组,分组资源为:",unprocessedGroups.count)
            let maxConcurrency = 3 // upper bound on concurrent batches
            let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
            if unprocessedGroups.count == 0 {
                // Nothing new to scan: finish with the cached groups only.
                let total = cachedGroups.compactMap{$0.assets}
                completionHandler?(total)
                return
            }
            // Process the remaining time groups in concurrent batches.
            for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
                let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
                await withTaskGroup(of: Void.self) { group in
                    for unGroup in batch {
                        group.addTask { [weak self] in
                            guard let self = self else {
                                print("self 为 nil,任务提前退出")
                                return
                            }
                            // 6.1 Pre-group by size (pixel area — see isFileSizeSimilar).
                            let sizeGroups = self.groupAssetsBySize(unGroup)
                            // 6.2 Cluster each size group.
                            for sizeGroup in sizeGroups {
                                let similarGroups = await self.findSimilarInGroupUsingKMeans(sizeGroup)
                                if !similarGroups.isEmpty {
                                    for similarGroup in similarGroups {
                                        let groupId = UUID().uuidString
                                        // 6.3 Build model values for the group.
                                        let assetModels = await createAssetModels(from: similarGroup)
                                        // 6.4 Report progress.
                                        await MainActor.run {
                                            progressHandler?(assetModels)
                                        }
                                        // 6.5 Record the group; flush to disk periodically.
                                        await stateManager.appendSimilarGroup(SimilarGroupModel(groupId: groupId, assets: assetModels))
                                        if await stateManager.shouldSavePendingGroups() {
                                            await savePendingSimilarGroups()
                                        }
                                    }
                                }
                            }
                            // 6.6 Mark this time group as processed.
                            if let firstDate = unGroup.first?.creationDate,
                               let lastDate = unGroup.last?.creationDate {
                                let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
                                let timeGroup = TimeGroupModel(
                                    groupId: groupId,
                                    startTime: firstDate.timeIntervalSince1970,
                                    endTime: lastDate.timeIntervalSince1970,
                                    isProcessed: true
                                )
                                await self.saveTimeGroup(timeGroup)
                            }
                        }
                    }
                }
            }
            // 7. Flush any still-pending groups and deliver the final result.
            if await !stateManager.getpendingSimilarGroups().isEmpty {
                await savePendingSimilarGroups()
            }
            let allGroups = await stateManager.getAllSimilarGroups()
            await MainActor.run {
                print("执行完毕")
                completionHandler?(allGroups.map { $0.assets })
            }
        }
    }
    // (A commented-out legacy createAssetModels that sized assets via
    // requestImageDataAndOrientation previously sat here; it was superseded by
    // the PHAssetResource-based implementation below and condensed during
    // review — recover it from version control if needed.)
    /// Builds `AssetModel` values for `assets` concurrently (completion order).
    /// The byte size is read from the first PHAssetResource via the KVC key
    /// "fileSize" — NOTE(review): that key is not public API; 0 is used when absent.
    private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
        return await withTaskGroup(of: AssetModel.self) { modelGroup in
            var models: [AssetModel] = []
            for asset in assets {
                modelGroup.addTask {
                    return await withCheckedContinuation { continuation in
                        let assetSize: Double
                        if let resource = PHAssetResource.assetResources(for: asset).first,
                           let size = resource.value(forKey: "fileSize") as? Int64 {
                            assetSize = Double(size)
                        } else {
                            assetSize = 0
                        }
                        let model = AssetModel(
                            localIdentifier: asset.localIdentifier,
                            assetSize: assetSize,
                            createDate: asset.creationDate ?? Date(),
                            mediaType: 1
                        )
                        continuation.resume(returning: model)
                    }
                }
            }
            for await model in modelGroup {
                models.append(model)
            }
            return models
        }
    }
}
// MARK: - Grouping helpers
extension ScreenshotSimilarJSONManager{
    /// Splits `assets` into groups whose creation times fall inside a 10-minute
    /// window anchored at the newest member; single-member groups are dropped.
    /// Assets without a creation date are treated as "now".
    nonisolated private func groupAssetsByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
        // Newest first.
        let ordered = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
        guard let newest = ordered.first else { return [] }
        var groups: [[PHAsset]] = []
        var bucket: [PHAsset] = []
        var anchorTime = newest.creationDate ?? Date()
        for asset in ordered {
            let assetTime = asset.creationDate ?? Date()
            // Descending order: the gap from the window anchor is anchor - current.
            if anchorTime.timeIntervalSince(assetTime) > timeWindowInSeconds {
                // Window exceeded: flush the bucket (singletons dropped) and re-anchor.
                if bucket.count > 1 {
                    groups.append(bucket)
                }
                bucket = []
                anchorTime = assetTime
            }
            bucket.append(asset)
        }
        // Flush the trailing bucket.
        if bucket.count > 1 {
            groups.append(bucket)
        }
        return groups
    }
    /// Greedy partition of `assets` into buckets of similar pixel area;
    /// buckets with fewer than two members are discarded.
    nonisolated private func groupAssetsBySize(_ assets: [PHAsset]) -> [[PHAsset]] {
        var groups: [[PHAsset]] = []
        var claimed = Set<String>()
        for anchor in assets where !claimed.contains(anchor.localIdentifier) {
            claimed.insert(anchor.localIdentifier)
            var bucket = [anchor]
            for candidate in assets where !claimed.contains(candidate.localIdentifier) {
                if isFileSizeSimilar(anchor, candidate) {
                    bucket.append(candidate)
                    claimed.insert(candidate.localIdentifier)
                }
            }
            if bucket.count > 1 {
                groups.append(bucket)
            }
        }
        return groups
    }
    /// True when the two assets' pixel areas differ by at most `fileSizeThreshold`.
    /// NOTE(review): despite the name this compares pixel area (width * height),
    /// not on-disk file size — confirm intended.
    nonisolated private func isFileSizeSimilar(_ asset1: PHAsset, _ asset2: PHAsset) -> Bool {
        let area1 = Double(asset1.pixelWidth * asset1.pixelHeight)
        let area2 = Double(asset2.pixelWidth * asset2.pixelHeight)
        let relativeDiff = abs(area1 - area2) / max(area1, area2)
        return relativeDiff <= fileSizeThreshold
    }
}
// MARK: - Persistence
extension ScreenshotSimilarJSONManager{
    /// Loads previously saved time groups and similar groups into the state manager.
    /// Missing or undecodable files silently fall back to empty lists.
    private func loadStoredData() async {
        let decoder = JSONDecoder()
        var timeGroups: [TimeGroupModel] = []
        var similarGroups: [SimilarGroupModel] = []
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
           let decoded = try? decoder.decode([TimeGroupModel].self, from: raw) {
            timeGroups = decoded
        }
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
           let decoded = try? decoder.decode([SimilarGroupModel].self, from: raw) {
            similarGroups = decoded
        }
        await stateManager.loadStoredData(timeGroups: timeGroups, similarGroups: similarGroups)
    }
    /// Appends a processed time group and persists the full list to disk.
    private func saveTimeGroup(_ group: TimeGroupModel) async {
        await stateManager.appendTimeGroup(group)
        let allGroups = await stateManager.getAllTimeGroups()
        if let encoded = try? JSONEncoder().encode(allGroups) {
            try? encoded.write(to: URL(fileURLWithPath: timeGroupsPath))
        }
    }
    /// Flushes pending similar groups into the state manager and persists all groups.
    private func savePendingSimilarGroups() async {
        await stateManager.savePendingGroups()
        let allGroups = await stateManager.getAllSimilarGroups()
        if let encoded = try? JSONEncoder().encode(allGroups) {
            try? encoded.write(to: URL(fileURLWithPath: similarGroupsPath))
        }
    }
    /// Returns stored similar groups, dropping assets that no longer exist in
    /// the photo library and groups that end up empty.
    private func loadSimilarGroups() async -> [SimilarGroupModel] {
        let stored = await stateManager.getAllSimilarGroups()
        let validated: [SimilarGroupModel] = stored.map { group in
            let survivors = group.assets.filter { asset in
                PHAsset.fetchAssets(withLocalIdentifiers: [asset.localIdentifier], options: nil).firstObject != nil
            }
            return SimilarGroupModel(groupId: group.groupId, assets: survivors)
        }
        return validated.filter { !$0.assets.isEmpty }
    }
}
// MARK: - pHash
extension ScreenshotSimilarJSONManager{
    /// Returns a perceptual hash for `asset`, using the hash cache, then the
    /// thumbnail cache, and finally requesting a 32x32 thumbnail.
    /// Returns nil when the image request yields no image.
    private func getOrCalculateHash(for asset: PHAsset) async -> String? {
        if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
            print("返回缓存cachedHash")
            return cachedHash
        }
        if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
            let hash = calculateImageHash(cachedImage)
            print("返回缓存hash")
            await stateManager.setCachedHash(hash, for: asset.localIdentifier)
            return hash
        }
        let options = PHImageRequestOptions()
        options.version = .original
        // Fix: with the default .opportunistic delivery an asynchronous request
        // may call its handler more than once (degraded image first), which would
        // resume the checked continuation twice and crash. .highQualityFormat
        // guarantees a single callback.
        options.deliveryMode = .highQualityFormat
        let targetSize = CGSize(width: 32, height: 32)
        return await withCheckedContinuation { continuation in
            PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
                if let image = image, let self = self {
                    let tempHash = self.calculateImageHash(image)
                    // Cache asynchronously; the hash is returned immediately.
                    Task {
                        await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
                        await self.stateManager.setCachedHash(tempHash, for: asset.localIdentifier)
                    }
                    continuation.resume(returning: tempHash)
                } else {
                    continuation.resume(returning: nil)
                }
            }
        }
    }
    /// Computes a 1024-bit average-hash string ("0"/"1" per sampled pixel):
    /// grayscale-filter the image, sample a 32x32 grid, threshold each sample
    /// against the mean. Returns "" when any rendering step fails or the bitmap
    /// is smaller than the sample grid.
    private func calculateImageHash(_ image: UIImage) -> String {
        guard let cgImage = image.cgImage else { return "" }
        let ciImage = CIImage(cgImage: cgImage)
        // Fix: the input must be attached BEFORE reading outputImage. The
        // original read filter.outputImage in the guard and only then called
        // setValue, so outputImage was always nil and every hash came back "".
        guard let filter = CIFilter(name: "CIPhotoEffectNoir") else {
            return ""
        }
        filter.setValue(ciImage, forKey: kCIInputImageKey)
        guard let outputImage = filter.outputImage else {
            return ""
        }
        let context = CIContext()
        guard let renderedImage = context.createCGImage(outputImage, from: outputImage.extent),
              let pixelData = renderedImage.dataProvider?.data,
              let data = CFDataGetBytePtr(pixelData) else {
            return ""
        }
        // Fix: index with the bitmap's real row stride and pixel size, and
        // refuse buffers smaller than the 32x32 grid (the original assumed a
        // tightly packed 32x32 RGBA buffer and could read out of bounds).
        let bytesPerPixel = max(1, renderedImage.bitsPerPixel / 8)
        let bytesPerRow = renderedImage.bytesPerRow
        guard renderedImage.width >= 32, renderedImage.height >= 32, bytesPerPixel >= 3 else {
            return ""
        }
        var pixels = Array(repeating: UInt8(0), count: 1024)
        for row in 0..<32 {
            for col in 0..<32 {
                let offset = row * bytesPerRow + col * bytesPerPixel
                // ITU-R BT.601 luma weights; the sum cannot exceed 255.
                let gray = UInt8(
                    0.299 * Double(data[offset]) +
                    0.587 * Double(data[offset + 1]) +
                    0.114 * Double(data[offset + 2])
                )
                pixels[row * 32 + col] = gray
            }
        }
        // Threshold each sample against the mean to obtain the hash bits.
        let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
        return pixels.map { $0 > average ? "1" : "0" }.joined()
    }
    /// Number of differing positions between two equal-length hash strings;
    /// Int.max when the lengths differ.
    private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
        guard hash1.count == hash2.count else { return Int.max }
        return zip(hash1, hash2).filter { $0 != $1 }.count
    }
    /// Cancels the in-flight scan, if any (see NOTE on `currentTask`: it is
    /// never assigned, so this is currently a no-op).
    func cancelCurrentOperation() {
        currentTask?.cancel()
    }
}
// MARK: - 相似度聚类算法
extension ScreenshotSimilarJSONManager{
    // K-Means clustering over binary-hash vectors.
    //
    // - Parameters:
    //   - data: points of equal dimension (bit vectors from hashToVector).
    //   - k: number of clusters; must satisfy 1 <= k <= data.count.
    //   - maxIterations: upper bound on refinement rounds.
    // - Returns: one cluster label (0..<k) per point; [] for invalid input.
    //
    // NOTE(review): initial centroids are drawn with randomElement() and may
    // repeat; a point farther than 0.3 from every centroid keeps the default
    // label 0 and is still accumulated into cluster 0 — confirm both intended.
    func kMeansClustering(data: [[Double]], k: Int, maxIterations: Int = 100) -> [Int] {
        guard data.count > 0 && k > 0 && k <= data.count else {
            return []
        }
        var centroids = (0..<k).map { _ in data.randomElement()! }
        var labels = Array(repeating: 0, count: data.count)
        for _ in 0..<maxIterations {
            var newCentroids = Array(repeating: Array(repeating: 0.0, count: data[0].count), count: k)
            var clusterCounts = Array(repeating: 0, count: k)
            // Assign every point to the nearest centroid within the 0.3 cutoff.
            for (i, point) in data.enumerated() {
                var minDistance = Double.infinity
                var closestCentroidIndex = 0
                for (j, centroid) in centroids.enumerated() {
                    let distance = euclideanDistance(point, centroid)
                    if distance < minDistance && distance < 0.3 {
                        minDistance = distance
                        closestCentroidIndex = j
                    }
                }
                labels[i] = closestCentroidIndex
                newCentroids[closestCentroidIndex] = newCentroids[closestCentroidIndex].enumerated().map { index, value in
                    value + point[index]
                }
                clusterCounts[closestCentroidIndex] += 1
            }
            // Recompute centroids; stop early once all of them are stable.
            var hasChanged = false
            for i in 0..<k {
                if clusterCounts[i] > 0 {
                    let newCentroid = newCentroids[i].enumerated().map { index, value in
                        value / Double(clusterCounts[i])
                    }
                    if newCentroid != centroids[i] {
                        hasChanged = true
                        centroids[i] = newCentroid
                    }
                }
            }
            // Converged: no centroid moved this round.
            if !hasChanged {
                break
            }
        }
        return labels
    }
// 计算欧几里得距离
func euclideanDistance(_ point1: [Double], _ point2: [Double]) -> Double {
let squaredSum = zip(point1, point2).map { pow($0 - $1, 2) }.reduce(0, +)
return sqrt(squaredSum)
}
// 将哈希值转换为数值向量
func hashToVector(_ hash: String) -> [Double] {
return hash.map { $0 == "1" ? 1.0 : 0.0 }
}
private func findSimilarInGroupUsingKMeans(_ assets: [PHAsset]) async -> [[PHAsset]] {
// 获取所有资产的哈希值
let hashes = await withTaskGroup(of: String?.self) { group in
for asset in assets {
group.addTask {
return await self.getOrCalculateHash(for: asset)
}
}
var result: [String?] = []
for await hash in group {
result.append(hash)
}
return result
}
// 将哈希值转换为数值向量
let vectors = hashes.compactMap { $0.map { hashToVector($0) } }
// 使用 K-Means 聚类算法
let k = min(assets.count, 10) // 假设最多 10 个簇
let labels = kMeansClustering(data: vectors, k: k)
// 根据聚类结果分组
var clusters: [[PHAsset]] = Array(repeating: [], count: k)
for (i, label) in labels.enumerated() {
clusters[label].append(assets[i])
}
// 过滤掉只有一个元素的簇
return clusters.filter { $0.count > 1 }
}
}
//
// VideoSimilarManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import Photos
import UIKit
import AVFoundation
// MARK: - 缓存机制
/// Per-video cached data, stored by `PHAsset.localIdentifier` in
/// `VideoAssetCacheManager`.
/// NOTE: member order defines the synthesized memberwise initializer used
/// at the call sites, so it must not be reordered.
private struct VideoAssetCache {
    let avAsset: AVAsset      // resolved AVAsset for the video
    let size: Int64           // byte size; always 0 where currently constructed — TODO populate
    let frameRate: Double     // nominal frame rate of the first video track
    let firstFrame: UIImage?  // decoded first frame; nil if extraction failed
}
/// Actor guarding a bounded cache of `VideoAssetCache` entries.
///
/// BUG FIX vs. original: eviction intended to drop the *earliest* entry but
/// used `Dictionary.keys.first`, whose order is unspecified — an arbitrary
/// entry was removed. Insertion order is now tracked explicitly so eviction
/// is true FIFO.
private actor VideoAssetCacheManager {
    private var cache: [String: VideoAssetCache] = [:]
    // Keys in insertion order; Dictionary has no defined iteration order.
    private var insertionOrder: [String] = []
    private let maxCacheSize = 50 // 最大缓存数量
    /// Returns the cached entry for `identifier`, or nil.
    func getCache(for identifier: String) -> VideoAssetCache? {
        return cache[identifier]
    }
    /// Inserts or overwrites the entry for `identifier`, evicting the
    /// oldest entries first when the cache is full.
    func setCache(_ videoCache: VideoAssetCache, for identifier: String) {
        if cache[identifier] == nil {
            // Only a brand-new key can grow the cache; evict before inserting.
            while cache.count >= maxCacheSize, let oldest = insertionOrder.first {
                insertionOrder.removeFirst()
                cache.removeValue(forKey: oldest)
            }
            insertionOrder.append(identifier)
        }
        cache[identifier] = videoCache
    }
    /// Drops every cached entry.
    func clearCache() {
        cache.removeAll()
        insertionOrder.removeAll()
    }
}
@MainActor
class VideoSimilarJSONManager: @unchecked Sendable {
static let shared = VideoSimilarJSONManager()
private let stateManager = VideoSimilarStateManager()
private init() {}
// 类中添加缓存管理器实例
private let assetCacheManager = VideoAssetCacheManager()
// MARK: - 配置参数
private let timeWindowInSeconds: TimeInterval = 600 // 10分钟时间窗口
private let durationThreshold: Double = 0.1 // 时长相差阈值(20%)
private let resolutionThreshold: Double = 0.2 // 分辨率相差阈值(20%)
private let frameRateThreshold: Double = 0.1 // 帧率相差阈值(10%)
private let hashDistanceThreshold: Int = 10 // 第一帧图像hash汉明距离阈值
// 文件路径
private var timeGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("VideoTimeGroups.json").path
}
private var similarGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("VideoSimilarGroups.json").path
}
private var currentTask: Task<Void, Error>?
private let latestVideoTimeKey = "LatestVideoTimestamp"
// MARK: - 主要处理函数
/// Scans `assets` for groups of similar videos.
///
/// Results are delivered incrementally: `progressHandler` fires once per
/// similar group (previously cached groups first, then newly found ones) and
/// `completionHandler` receives all groups at the end. Both callbacks are
/// invoked on the main actor.
///
/// - Parameters:
///   - assets: Candidate videos; assumed newest-first — the first element's
///     creation date is persisted as the incremental-scan high-water mark.
///   - progressHandler: Called with each similar group as it is found.
///   - completionHandler: Called once with every similar group.
func findSimilarVideos(in assets: [PHAsset],
                    progressHandler: (([AssetModel]) -> Void)?,
                    completionHandler: (([[AssetModel]]) -> Void)?) {
    Task {
        // 1. Load previously persisted time groups / similar groups from disk.
        await loadStoredData()
        // 2. Read the newest-asset timestamp recorded by the previous scan.
        var lastLatestTime = UserDefaults.standard.double(forKey: latestVideoTimeKey)
        if lastLatestTime == 0{
            lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
        }
        // 3. Report already-cached similar groups immediately.
        let cachedGroups = await stateManager.getAllSimilarGroups()
        await MainActor.run {
            for group in cachedGroups {
                progressHandler?(group.assets)
            }
        }
        // 4. Time-window grouping.
        // Assets strictly newer than the recorded high-water mark...
        let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
        // ...and assets at or before it.
        let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
        // Persist the new high-water mark (first element assumed newest).
        if let latestAsset = assets.first {
            let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
            UserDefaults.standard.set(latestTime, forKey: latestVideoTimeKey)
            UserDefaults.standard.synchronize()
        }
        let newtimeGroup = groupVideosByTimeWindow(newAssets)
        let oldGroups = groupVideosByTimeWindow(oldAssets)
        let timeGroups = newtimeGroup + oldGroups
        var unprocessedGroups: [[PHAsset]] = []
        let processedTimeGroups = await stateManager.getAllTimeGroups()
        // 5. Skip time groups whose whole span was already processed.
        for group in timeGroups {
            if let firstAsset = group.first,
            let lastAsset = group.last,
            let firstDate = firstAsset.creationDate,
            let lastDate = lastAsset.creationDate {
                let isProcessed = processedTimeGroups.contains { timeGroup in
                    return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
                    timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
                    timeGroup.isProcessed
                }
                if !isProcessed {
                    unprocessedGroups.append(group)
                }
            }
        }
        // 6. Process remaining groups concurrently, in batches.
        let maxConcurrency = 4 // video work is heavy — keep concurrency low
        let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
        if unprocessedGroups.isEmpty {
            let total = cachedGroups.compactMap { $0.assets }
            completionHandler?(total)
            return
        }
        for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
            let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
            await withTaskGroup(of: Void.self) { group in
                autoreleasepool {
                    for unGroup in batch {
                        group.addTask { [weak self] in
                            guard let self = self else { return }
                            // 6.1 Pre-group by coarse features (duration/resolution/fps).
                            let preGroups = await self.groupVideosByFeatures(unGroup)
                            // 6.2 Refine each pre-group by first-frame similarity.
                            for preGroup in preGroups {
                                let similarGroups = await self.findSimilarInGroupByFirstFrame(preGroup)
                                // 6.3 Record every similar group found.
                                for similarGroup in similarGroups {
                                    let groupId = UUID().uuidString
                                    let assetModels = await self.createAssetModels(from: similarGroup)
                                    // 6.4 Report progress on the main actor.
                                    await MainActor.run {
                                        progressHandler?(assetModels)
                                    }
                                    // 6.5 Buffer the group; flush to disk when the
                                    // state manager says the buffer is large enough.
                                    await self.stateManager.appendSimilarGroup(
                                        SimilarGroupModel(groupId: groupId, assets: assetModels)
                                    )
                                    if await self.stateManager.shouldSavePendingGroups() {
                                        await self.savePendingSimilarGroups()
                                    }
                                }
                            }
                            // 6.6 Mark this time span as processed so future scans skip it.
                            if let firstDate = unGroup.first?.creationDate,
                            let lastDate = unGroup.last?.creationDate {
                                let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
                                let timeGroup = TimeGroupModel(
                                    groupId: groupId,
                                    startTime: firstDate.timeIntervalSince1970,
                                    endTime: lastDate.timeIntervalSince1970,
                                    isProcessed: true
                                )
                                await self.saveTimeGroup(timeGroup)
                            }
                        }
                    }
                }
            }
        }
        // 7. Flush any still-pending groups, then report completion.
        if await !stateManager.getpendingSimilarGroups().isEmpty {
            await savePendingSimilarGroups()
        }
        let allGroups = await stateManager.getAllSimilarGroups()
        await MainActor.run {
            completionHandler?(allGroups.map { $0.assets })
        }
    }
}
// MARK: - 视频特征分组
/// Buckets videos whose duration, resolution and frame rate all fall within
/// the configured relative thresholds of a seed asset.
/// Only buckets with at least two members are returned; each asset belongs
/// to at most one bucket (first seed wins).
private func groupVideosByFeatures(_ assets: [PHAsset]) async -> [[PHAsset]] {
    var groups: [[PHAsset]] = []
    var visited = Set<String>()
    for seed in assets {
        guard !visited.contains(seed.localIdentifier) else { continue }
        visited.insert(seed.localIdentifier)
        var bucket = [seed]
        let seedDuration = seed.duration
        let seedPixels = Double(seed.pixelWidth * seed.pixelHeight)
        // Frame rate comes from the (cached) AVAsset lookup.
        let seedFrameRate = await getVideoFrameRate(seed)
        for candidate in assets where !visited.contains(candidate.localIdentifier) {
            // Relative duration difference. NOTE: the `>`-then-continue form is
            // deliberate — a NaN ratio (both durations 0) keeps the candidate.
            let durationRatio = abs(candidate.duration - seedDuration) / max(candidate.duration, seedDuration)
            if durationRatio > durationThreshold {
                continue
            }
            // Relative pixel-count difference.
            let candidatePixels = Double(candidate.pixelWidth * candidate.pixelHeight)
            let pixelRatio = abs(candidatePixels - seedPixels) / max(candidatePixels, seedPixels)
            if pixelRatio > resolutionThreshold {
                continue
            }
            // Relative frame-rate difference.
            let candidateFrameRate = await getVideoFrameRate(candidate)
            let frameRateRatio = abs(candidateFrameRate - seedFrameRate) / max(candidateFrameRate, seedFrameRate)
            if frameRateRatio > frameRateThreshold {
                continue
            }
            bucket.append(candidate)
            visited.insert(candidate.localIdentifier)
        }
        if bucket.count > 1 {
            groups.append(bucket)
        }
    }
    return groups
}
/// Resolves the nominal frame rate for `asset`, caching the AVAsset,
/// frame rate and first frame along the way. Returns 0.0 on failure.
///
/// BUG FIX vs. original: `PHVideoRequestOptions` was fully configured and
/// then discarded — `options: nil` was passed to `requestAVAsset`, so the
/// fast-format / local-only settings never applied.
private func getVideoFrameRate(_ asset: PHAsset) async -> Double {
    // Serve from cache when this asset was already resolved.
    if let cache = await assetCacheManager.getCache(for: asset.localIdentifier) {
        return cache.frameRate
    }
    return await withCheckedContinuation { continuation in
        let options = PHVideoRequestOptions()
        options.version = .original
        options.deliveryMode = .fastFormat
        options.isNetworkAccessAllowed = false
        PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { [weak self] avAsset, _, _ in
            guard let self = self,
                  let videoAsset = avAsset,
                  let track = videoAsset.tracks(withMediaType: .video).first else {
                continuation.resume(returning: 0.0)
                return
            }
            // Grab the first frame while the AVAsset is in hand so both
            // values land in the cache from a single request.
            let generator = AVAssetImageGenerator(asset: videoAsset)
            generator.appliesPreferredTrackTransform = true
            generator.maximumSize = CGSize(width: 640, height: 640)
            var firstFrame: UIImage?
            do {
                let cgImage = try generator.copyCGImage(at: .zero, actualTime: nil)
                firstFrame = UIImage(cgImage: cgImage)
            } catch {
                print("获取第一帧失败:", error)
            }
            let frameRate = Double(track.nominalFrameRate)
            let cache = VideoAssetCache(
                avAsset: videoAsset,
                size: 0, // byte size not resolved here
                frameRate: frameRate,
                firstFrame: firstFrame
            )
            Task {
                await self.assetCacheManager.setCache(cache, for: asset.localIdentifier)
            }
            continuation.resume(returning: frameRate)
        }
    }
}
/// Returns the cached first frame for `asset`, populating the cache via the
/// frame-rate lookup when necessary. Returns nil if extraction failed.
private func getFirstFrame(for asset: PHAsset) async -> UIImage? {
    // Fast path: already cached.
    if let hit = await assetCacheManager.getCache(for: asset.localIdentifier) {
        return hit.firstFrame
    }
    // getVideoFrameRate caches a VideoAssetCache (incl. first frame) as a side effect.
    _ = await getVideoFrameRate(asset)
    return await assetCacheManager.getCache(for: asset.localIdentifier)?.firstFrame
}
// MARK: - 第一帧相似度比较
// MARK: - 第一帧相似度比较
/// Clusters `assets` by first-frame perceptual-hash similarity using K-Means.
/// Returns only clusters with at least two members.
///
/// BUG FIXES vs. original:
/// - `cluster.indices.map { assetHashes[$0].asset }` mapped over positions
///   0..<count instead of the stored vector indices, returning the wrong assets.
/// - With exactly one hashed asset, `k` evaluated to 0 and
///   `kMeansClustering` indexed an empty cluster array; a single asset can
///   never form a similar group, so it is rejected up front.
private func findSimilarInGroupByFirstFrame(_ assets: [PHAsset]) async -> [[PHAsset]] {
    // 1. First-frame hash per asset; sequential loop keeps asset↔hash pairing stable.
    var assetHashes: [(asset: PHAsset, hash: String)] = []
    for asset in assets {
        if let hash = await getFirstFrameHash(for: asset) {
            assetHashes.append((asset, hash))
        }
    }
    guard assetHashes.count > 1 else { return [] }
    // 2. Hash strings → numeric feature vectors.
    let vectors = assetHashes.map { hashToVector($0.hash) }
    // 3. K grows roughly with sqrt(n), clamped to [2, n/2].
    let k = min(vectors.count / 2, max(2, Int(sqrt(Double(vectors.count)))))
    let clusters = kMeansClustering(vectors: vectors, k: k)
    // 4. Map clusters (lists of indices into assetHashes) back to assets.
    var similarGroups: [[PHAsset]] = []
    for cluster in clusters {
        let groupAssets = cluster.map { assetHashes[$0].asset }
        if groupAssets.count > 1 {
            similarGroups.append(groupAssets)
        }
    }
    return similarGroups
}
// 将hash字符串转换为特征向量
// 将hash字符串转换为特征向量
/// Converts a binary hash string into a numeric feature vector by
/// interpreting each 8-character chunk as an integer (chunks that are not
/// valid base-2 numbers are skipped, matching `Int(_:radix:)` failure).
private func hashToVector(_ hash: String) -> [Double] {
    let chunkSize = 8
    var vector: [Double] = []
    var cursor = hash.startIndex
    while cursor < hash.endIndex {
        // The final chunk may be shorter than chunkSize.
        let upper = hash.index(cursor, offsetBy: chunkSize, limitedBy: hash.endIndex) ?? hash.endIndex
        if let value = Int(hash[cursor..<upper], radix: 2) {
            vector.append(Double(value))
        }
        cursor = upper
    }
    return vector
}
// K-Means聚类算法实现
// K-Means聚类算法实现
// Returns `k` clusters, each a list of indices into `vectors`.
// NOTE(review): centroids are seeded randomly, so results vary run-to-run.
// Unlike the screenshot variant, no distance threshold is applied — every
// point is always assigned to its nearest centroid.
private func kMeansClustering(vectors: [[Double]], k: Int) -> [[Int]] {
    // Fewer points than clusters: a single cluster holding everything.
    guard vectors.count >= k else { return [Array(0..<vectors.count)] }
    // 1. Random initial centroids (duplicates possible).
    var centroids = (0..<k).map { _ in vectors[Int.random(in: 0..<vectors.count)] }
    var clusters: [[Int]] = Array(repeating: [], count: k)
    var previousClusters: [[Int]] = []
    // 2. Iterate until assignments stabilise or the cap is hit.
    let maxIterations = 100
    var iteration = 0
    while iteration < maxIterations {
        // Reset assignments for this round.
        clusters = Array(repeating: [], count: k)
        // 3. Assign every vector to its nearest centroid.
        for (index, vector) in vectors.enumerated() {
            var minDistance = Double.infinity
            var closestCentroid = 0
            for (centroidIndex, centroid) in centroids.enumerated() {
                let distance = euclideanDistance(vector, centroid)
                if distance < minDistance {
                    minDistance = distance
                    closestCentroid = centroidIndex
                }
            }
            clusters[closestCentroid].append(index)
        }
        // 4. Converged when the assignment did not change.
        if clusters == previousClusters {
            break
        }
        // 5. Move each non-empty cluster's centroid to its mean.
        for i in 0..<k {
            guard !clusters[i].isEmpty else { continue }
            let clusterVectors = clusters[i].map { vectors[$0] }
            centroids[i] = calculateMean(clusterVectors)
        }
        previousClusters = clusters
        iteration += 1
    }
    return clusters
}
// 计算欧氏距离
// 计算欧氏距离
/// L2 distance between two vectors; `Double.infinity` when lengths differ.
private func euclideanDistance(_ v1: [Double], _ v2: [Double]) -> Double {
    guard v1.count == v2.count else { return Double.infinity }
    var sumOfSquares = 0.0
    for (a, b) in zip(v1, v2) {
        sumOfSquares += pow(a - b, 2)
    }
    return sqrt(sumOfSquares)
}
// 计算向量均值
// 计算向量均值
/// Component-wise mean of `vectors`; `[]` when the input is empty.
/// All vectors are assumed to share the length of the first one.
private func calculateMean(_ vectors: [[Double]]) -> [Double] {
    guard let first = vectors.first else { return [] }
    let count = Double(vectors.count)
    var mean = Array(repeating: 0.0, count: first.count)
    for vector in vectors {
        for (component, value) in vector.enumerated() {
            // Divide per term (matches original accumulation order).
            mean[component] += value / count
        }
    }
    return mean
}
// MARK: - 获取视频第一帧
// MARK: - 获取视频第一帧
/// Returns the perceptual hash of `asset`'s first frame, computing and
/// caching it (hash + image) on first use. Returns nil when no frame
/// could be extracted.
private func getFirstFrameHash(for asset: PHAsset) async -> String? {
    // Fast path: hash already cached.
    if let cached = await stateManager.getCachedHash(for: asset.localIdentifier) {
        return cached
    }
    // Extract the first frame, then hash it.
    guard let frame = await getFirstFrame(for: asset) else { return nil }
    let hash = calculateImageHash(frame)
    // Cache both the hash and the frame for later lookups.
    await stateManager.setCachedHash(hash, for: asset.localIdentifier)
    await stateManager.setCachedImage(frame, for: asset.localIdentifier)
    return hash
}
// MARK: - 辅助方法
// MARK: - 辅助方法
/// Splits `assets` into groups whose creation dates fall within
/// `timeWindowInSeconds` of each group's first (newest) member.
/// Groups with fewer than two members are dropped.
private func groupVideosByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
    // Newest first.
    let ordered = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
    guard let newest = ordered.first else { return [] }
    var result: [[PHAsset]] = []
    var current: [PHAsset] = []
    var windowStart = newest.creationDate ?? Date()
    for asset in ordered {
        let created = asset.creationDate ?? Date()
        // Descending order, so elapsed time is windowStart − created.
        if windowStart.timeIntervalSince(created) > timeWindowInSeconds {
            // Close the current group (if it has 2+ members) and open a new
            // one anchored at this asset's timestamp.
            if current.count > 1 {
                result.append(current)
            }
            current = []
            windowStart = created
        }
        current.append(asset)
    }
    // Flush the trailing group.
    if current.count > 1 {
        result.append(current)
    }
    return result
}
/// Computes a 1024-bit average-hash string ("0"/"1" characters) for `image`.
///
/// Pipeline: downscale to 32×32 → grayscale via CIPhotoEffectNoir →
/// per-pixel luma → threshold against the mean luma. Returns "" on failure.
///
/// BUG FIXES vs. original: the 32×32 resize result was discarded (the
/// full-size image was hashed instead), the filter input was set AFTER
/// `outputImage` was read (so the guard always failed), and pixel data was
/// read assuming a tightly packed buffer.
private func calculateImageHash(_ image: UIImage) -> String {
    let side = 32
    let size = CGSize(width: side, height: side)
    // Downscale so the pixel loop below reads a known 32×32 grid.
    UIGraphicsBeginImageContextWithOptions(size, true, 1.0)
    image.draw(in: CGRect(origin: .zero, size: size))
    let resized = UIGraphicsGetImageFromCurrentImageContext()
    UIGraphicsEndImageContext()
    guard let resizedCG = resized?.cgImage else { return "" }
    // Grayscale: the input image must be set BEFORE outputImage is read.
    guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
    filter.setValue(CIImage(cgImage: resizedCG), forKey: kCIInputImageKey)
    guard let outputImage = filter.outputImage else { return "" }
    let context = CIContext()
    guard let renderedCG = context.createCGImage(outputImage, from: outputImage.extent),
          let pixelData = renderedCG.dataProvider?.data,
          let data = CFDataGetBytePtr(pixelData) else {
        return ""
    }
    // Rows may be padded; honour bytesPerRow instead of assuming side*4.
    let bytesPerRow = renderedCG.bytesPerRow
    let bytesPerPixel = max(1, renderedCG.bitsPerPixel / 8)
    guard bytesPerPixel >= 3 else { return "" }
    var pixels = Array(repeating: UInt8(0), count: side * side)
    for row in 0..<side {
        for col in 0..<side {
            let offset = row * bytesPerRow + col * bytesPerPixel
            // ITU-R BT.601 luma weights; weights sum to 1.0 so the result fits UInt8.
            let gray = UInt8(
                0.299 * Double(data[offset]) +
                0.587 * Double(data[offset + 1]) +
                0.114 * Double(data[offset + 2])
            )
            pixels[row * side + col] = gray
        }
    }
    let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
    return pixels.map { $0 > average ? "1" : "0" }.joined()
}
/// Hamming distance between two equal-length binary hash strings;
/// `Int.max` (maximally dissimilar) when the lengths differ.
private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
    guard hash1.count == hash2.count else { return Int.max }
    return zip(hash1, hash2).reduce(0) { $1.0 != $1.1 ? $0 + 1 : $0 }
}
/// Maps each `PHAsset` to the project's `AssetModel` record.
/// `mediaType` is hard-coded to 2 — presumably "video" in this project's
/// encoding; verify against AssetModel's users.
private func createAssetModels(from assets: [PHAsset]) -> [AssetModel] {
    return assets.map { asset in
        // NOTE(review): "fileSize" is an undocumented KVC key on
        // PHAssetResource; fall back to 0 when it is unavailable.
        var byteCount: Double = 0
        if let resource = PHAssetResource.assetResources(for: asset).first,
           let size = resource.value(forKey: "fileSize") as? Int64 {
            byteCount = Double(size)
        }
        return AssetModel(
            localIdentifier: asset.localIdentifier,
            assetSize: byteCount,
            createDate: asset.creationDate ?? Date(),
            mediaType: 2
        )
    }
}
}
// MARK: - Persistence
extension VideoSimilarJSONManager {
    /// Loads previously persisted time groups and similar groups from disk
    /// into the state manager. Missing or undecodable files are ignored
    /// (best-effort: a fresh install simply starts empty).
    private func loadStoredData() async {
        let decoder = JSONDecoder()
        var timeGroups: [TimeGroupModel] = []
        var similarGroups: [SimilarGroupModel] = []
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
           let decoded = try? decoder.decode([TimeGroupModel].self, from: raw) {
            timeGroups = decoded
        }
        if let raw = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
           let decoded = try? decoder.decode([SimilarGroupModel].self, from: raw) {
            similarGroups = decoded
        }
        await stateManager.loadStoredData(timeGroups: timeGroups, similarGroups: similarGroups)
    }
    /// Appends `group` to the state manager and rewrites the time-groups
    /// JSON file. Write failures are silently ignored (best-effort cache).
    private func saveTimeGroup(_ group: TimeGroupModel) async {
        await stateManager.appendTimeGroup(group)
        let all = await stateManager.getAllTimeGroups()
        if let encoded = try? JSONEncoder().encode(all) {
            try? encoded.write(to: URL(fileURLWithPath: timeGroupsPath))
        }
    }
    /// Flushes buffered similar groups in the state manager and rewrites the
    /// similar-groups JSON file. Write failures are silently ignored.
    private func savePendingSimilarGroups() async {
        await stateManager.savePendingGroups()
        let all = await stateManager.getAllSimilarGroups()
        if let encoded = try? JSONEncoder().encode(all) {
            try? encoded.write(to: URL(fileURLWithPath: similarGroupsPath))
        }
    }
}
// private func getFirstFrame(for asset: PHAsset) async -> UIImage? {
// return await withCheckedContinuation { continuation in
// let options = PHVideoRequestOptions()
// options.version = .original
// options.deliveryMode = .fastFormat
// options.isNetworkAccessAllowed = false
//
// // 将 options 的配置复制到闭包内
// PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
// guard let videoAsset = avAsset else {
// continuation.resume(returning: nil)
// return
// }
//
// let generator = AVAssetImageGenerator(asset: videoAsset)
// generator.appliesPreferredTrackTransform = true
// generator.maximumSize = CGSize(width: 640, height: 640)
//
// do {
// let cgImage = try generator.copyCGImage(at: .zero, actualTime: nil)
// let image = UIImage(cgImage: cgImage)
// continuation.resume(returning: image)
// } catch {
// continuation.resume(returning: nil)
// }
// }
// }
// }
// private func getVideoFrameRate(_ asset: PHAsset) async -> Double {
// return await withCheckedContinuation { continuation in
// let options = PHVideoRequestOptions()
// options.version = .original
// options.deliveryMode = .fastFormat
// options.isNetworkAccessAllowed = false
//
// PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
// if let videoAsset = avAsset, let track = videoAsset.tracks(withMediaType: .video).first {
// continuation.resume(returning: Double(track.nominalFrameRate))
// } else {
// continuation.resume(returning: 0.0)
// }
// }
// }
// }
...@@ -358,28 +358,28 @@ class HomePhotosModel:Codable { ...@@ -358,28 +358,28 @@ class HomePhotosModel:Codable {
} }
class AssetModel :Codable,Hashable { //class AssetModel :Codable,Hashable {
var localIdentifier : String // var localIdentifier : String
var assetSize : Double // var assetSize : Double
var createDate : Date // var createDate : Date
init(localIdentifier: String, assetSize: Double, createDate: Date) { // init(localIdentifier: String, assetSize: Double, createDate: Date) {
self.localIdentifier = localIdentifier // self.localIdentifier = localIdentifier
self.assetSize = assetSize // self.assetSize = assetSize
self.createDate = createDate // self.createDate = createDate
} // }
//
func hash(into hasher: inout Hasher) { // func hash(into hasher: inout Hasher) {
hasher.combine(localIdentifier) // hasher.combine(localIdentifier)
hasher.combine(assetSize) // hasher.combine(assetSize)
hasher.combine(createDate) // hasher.combine(createDate)
} // }
//
static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool { // static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool {
return lhs.localIdentifier == rhs.localIdentifier && // return lhs.localIdentifier == rhs.localIdentifier &&
lhs.assetSize == rhs.assetSize && // lhs.assetSize == rhs.assetSize &&
lhs.createDate == rhs.createDate // lhs.createDate == rhs.createDate
} // }
} //}
...@@ -6,19 +6,38 @@ ...@@ -6,19 +6,38 @@
// //
import UIKit import UIKit
import Lottie
class NewGuideOneCell: UICollectionViewCell { class NewGuideOneCell: UICollectionViewCell {
@IBOutlet weak var subtitle: UILabel! @IBOutlet weak var subtitle: UILabel!
@IBOutlet weak var htitle: UILabel! @IBOutlet weak var htitle: UILabel!
@IBOutlet weak var userL: UILabel!
override func awakeFromNib() { override func awakeFromNib() {
super.awakeFromNib() super.awakeFromNib()
setSubtitleAttr() setSubtitleAttr()
sethtitleAttr() sethtitleAttr()
addSubview(animationView)
animationView.snp.makeConstraints { make in
make.width.equalTo(375.RW())
make.height.equalTo(255.RW())
make.right.equalToSuperview()
make.top.equalTo(userL.snp.bottom).offset(35)
}
animationView.play()
} }
lazy var animationView : LottieAnimationView = {
let animationView = LottieAnimationView(name: "animation_guide_0")
animationView.loopMode = .loop
return animationView
}()
func setSubtitleAttr(){ func setSubtitleAttr(){
// 创建富文本 // 创建富文本
let fullText = "Over 1000+ users worldwide" let fullText = "Over 1000+ users worldwide"
......
...@@ -43,10 +43,10 @@ ...@@ -43,10 +43,10 @@
<nil key="highlightedColor"/> <nil key="highlightedColor"/>
</label> </label>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_ye" translatesAutoresizingMaskIntoConstraints="NO" id="qJm-ym-n7r"> <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_ye" translatesAutoresizingMaskIntoConstraints="NO" id="qJm-ym-n7r">
<rect key="frame" x="37" y="312.66666666666669" width="159" height="45"/> <rect key="frame" x="37" y="284.66666666666669" width="159" height="45"/>
</imageView> </imageView>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Dp0-fO-xVX"> <view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="Dp0-fO-xVX">
<rect key="frame" x="201" y="330.33333333333331" width="1" height="10"/> <rect key="frame" x="201" y="302.33333333333331" width="1" height="10"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/> <color key="backgroundColor" systemColor="systemBackgroundColor"/>
<constraints> <constraints>
<constraint firstAttribute="height" constant="10" id="iOh-nB-kgO"/> <constraint firstAttribute="height" constant="10" id="iOh-nB-kgO"/>
...@@ -54,34 +54,34 @@ ...@@ -54,34 +54,34 @@
</constraints> </constraints>
</view> </view>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_ye" translatesAutoresizingMaskIntoConstraints="NO" id="tPc-eM-ini"> <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_ye" translatesAutoresizingMaskIntoConstraints="NO" id="tPc-eM-ini">
<rect key="frame" x="207" y="312.66666666666669" width="159" height="45"/> <rect key="frame" x="207" y="284.66666666666669" width="159" height="45"/>
</imageView> </imageView>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="50,000+" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Gkr-xl-yIC"> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="50,000+" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Gkr-xl-yIC">
<rect key="frame" x="241" y="316.66666666666669" width="91" height="27"/> <rect key="frame" x="241" y="288.66666666666669" width="91" height="27"/>
<fontDescription key="fontDescription" type="boldSystem" pointSize="22"/> <fontDescription key="fontDescription" type="boldSystem" pointSize="22"/>
<color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/> <nil key="highlightedColor"/>
</label> </label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Clean up photos" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="BBI-So-OYY"> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Clean up photos" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="BBI-So-OYY">
<rect key="frame" x="242.66666666666666" y="343.66666666666669" width="87.666666666666657" height="13.333333333333314"/> <rect key="frame" x="242.66666666666666" y="315.66666666666669" width="87.666666666666657" height="13.333333333333314"/>
<fontDescription key="fontDescription" type="system" weight="medium" pointSize="11"/> <fontDescription key="fontDescription" type="system" weight="medium" pointSize="11"/>
<color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/> <nil key="highlightedColor"/>
</label> </label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="1,000+" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="btU-j2-5y8"> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="1,000+" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="btU-j2-5y8">
<rect key="frame" x="79.666666666666671" y="316.66666666666669" width="74.000000000000014" height="27"/> <rect key="frame" x="79.666666666666671" y="288.66666666666669" width="74.000000000000014" height="27"/>
<fontDescription key="fontDescription" type="boldSystem" pointSize="22"/> <fontDescription key="fontDescription" type="boldSystem" pointSize="22"/>
<color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/> <nil key="highlightedColor"/>
</label> </label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="User" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Nyn-Jp-1go"> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="User" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="Nyn-Jp-1go">
<rect key="frame" x="104" y="343.66666666666669" width="25.333333333333343" height="13.333333333333314"/> <rect key="frame" x="104" y="315.66666666666669" width="25.333333333333343" height="13.333333333333314"/>
<fontDescription key="fontDescription" type="system" weight="medium" pointSize="11"/> <fontDescription key="fontDescription" type="system" weight="medium" pointSize="11"/>
<color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <color key="textColor" red="0.08235294118" green="0.08235294118" blue="0.08235294118" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/> <nil key="highlightedColor"/>
</label> </label>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_huadong" translatesAutoresizingMaskIntoConstraints="NO" id="dtg-ck-CGz"> <imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_huadong" translatesAutoresizingMaskIntoConstraints="NO" id="dtg-ck-CGz">
<rect key="frame" x="28" y="392.66666666666669" width="375" height="214.00000000000006"/> <rect key="frame" x="28" y="364.66666666666669" width="375" height="214.00000000000006"/>
<constraints> <constraints>
<constraint firstAttribute="height" constant="214" id="OqW-5R-Kz3" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstAttribute="height" constant="214" id="OqW-5R-Kz3" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
<constraint firstAttribute="width" constant="375" id="WrQ-JG-AK0" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstAttribute="width" constant="375" id="WrQ-JG-AK0" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
...@@ -112,7 +112,7 @@ ...@@ -112,7 +112,7 @@
<constraint firstItem="Ofu-Vm-8RJ" firstAttribute="top" secondItem="Txd-ub-V2M" secondAttribute="bottom" constant="8" id="n7i-Ki-OaQ"/> <constraint firstItem="Ofu-Vm-8RJ" firstAttribute="top" secondItem="Txd-ub-V2M" secondAttribute="bottom" constant="8" id="n7i-Ki-OaQ"/>
<constraint firstItem="Txd-ub-V2M" firstAttribute="top" secondItem="EVM-dM-Bi7" secondAttribute="bottom" constant="60" id="pa0-0g-Sk0" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstItem="Txd-ub-V2M" firstAttribute="top" secondItem="EVM-dM-Bi7" secondAttribute="bottom" constant="60" id="pa0-0g-Sk0" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
<constraint firstItem="btU-j2-5y8" firstAttribute="top" secondItem="qJm-ym-n7r" secondAttribute="top" constant="4" id="snF-F2-byq"/> <constraint firstItem="btU-j2-5y8" firstAttribute="top" secondItem="qJm-ym-n7r" secondAttribute="top" constant="4" id="snF-F2-byq"/>
<constraint firstItem="qJm-ym-n7r" firstAttribute="top" secondItem="Ofu-Vm-8RJ" secondAttribute="bottom" constant="88" id="uyA-mj-8K2" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstItem="qJm-ym-n7r" firstAttribute="top" secondItem="Ofu-Vm-8RJ" secondAttribute="bottom" constant="60" id="uyA-mj-8K2" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
<constraint firstItem="Dp0-fO-xVX" firstAttribute="centerX" secondItem="gTV-IL-0wX" secondAttribute="centerX" id="vn8-co-5In"/> <constraint firstItem="Dp0-fO-xVX" firstAttribute="centerX" secondItem="gTV-IL-0wX" secondAttribute="centerX" id="vn8-co-5In"/>
<constraint firstItem="dtg-ck-CGz" firstAttribute="top" secondItem="qJm-ym-n7r" secondAttribute="bottom" constant="35" id="wzG-NF-EWD"/> <constraint firstItem="dtg-ck-CGz" firstAttribute="top" secondItem="qJm-ym-n7r" secondAttribute="bottom" constant="35" id="wzG-NF-EWD"/>
<constraint firstItem="Nyn-Jp-1go" firstAttribute="centerX" secondItem="btU-j2-5y8" secondAttribute="centerX" id="y6x-9h-nNx"/> <constraint firstItem="Nyn-Jp-1go" firstAttribute="centerX" secondItem="btU-j2-5y8" secondAttribute="centerX" id="y6x-9h-nNx"/>
...@@ -122,6 +122,7 @@ ...@@ -122,6 +122,7 @@
<connections> <connections>
<outlet property="htitle" destination="Ofu-Vm-8RJ" id="YM4-rh-Ra7"/> <outlet property="htitle" destination="Ofu-Vm-8RJ" id="YM4-rh-Ra7"/>
<outlet property="subtitle" destination="Txd-ub-V2M" id="gKq-qF-heh"/> <outlet property="subtitle" destination="Txd-ub-V2M" id="gKq-qF-heh"/>
<outlet property="userL" destination="Nyn-Jp-1go" id="inq-W9-7hO"/>
</connections> </connections>
<point key="canvasLocation" x="385.49618320610688" y="259.50704225352115"/> <point key="canvasLocation" x="385.49618320610688" y="259.50704225352115"/>
</collectionViewCell> </collectionViewCell>
......
//
// NewGuideThreeCell.swift
// PhoneManager
//
// Created by edy on 2025/4/28.
//
import UIKit
import Lottie
/// Third onboarding-guide page: a subtitle label (from the nib) with a looping
/// Lottie animation pinned beneath it.
class NewGuideThreeCell: UICollectionViewCell {
    /// Subtitle label wired up in the nib; the animation is laid out below it.
    @IBOutlet weak var subT: UILabel!

    /// Looping guide animation ("animation_guide_2"), built on first access.
    lazy var animationView: LottieAnimationView = {
        let lottieView = LottieAnimationView(name: "animation_guide_2")
        lottieView.loopMode = .loop
        return lottieView
    }()

    override func awakeFromNib() {
        super.awakeFromNib()
        // NOTE(review): added to the cell itself (not contentView), matching the
        // original implementation — confirm this is intentional.
        addSubview(animationView)
        animationView.snp.makeConstraints { make in
            make.width.equalTo(280.RW())
            make.height.equalTo(274.RW())
            make.centerX.equalToSuperview().offset(-10)
            make.top.equalTo(subT.snp.bottom).offset(57)
        }
        animationView.play()
    }
}
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
<rect key="frame" x="0.0" y="0.0" width="414" height="678"/> <rect key="frame" x="0.0" y="0.0" width="414" height="678"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<subviews> <subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_tongjitu" translatesAutoresizingMaskIntoConstraints="NO" id="vPU-c0-KKP"> <imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_tongjitu" translatesAutoresizingMaskIntoConstraints="NO" id="vPU-c0-KKP">
<rect key="frame" x="85" y="316" width="244" height="239"/> <rect key="frame" x="85" y="316" width="244" height="239"/>
<constraints> <constraints>
<constraint firstAttribute="height" constant="239" id="4zt-cu-Hqj" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstAttribute="height" constant="239" id="4zt-cu-Hqj" customClass="ScreenWidthRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
...@@ -66,6 +66,9 @@ ...@@ -66,6 +66,9 @@
<constraint firstItem="ZvL-Z4-1T4" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" constant="47" id="zT2-fE-W1k"/> <constraint firstItem="ZvL-Z4-1T4" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" constant="47" id="zT2-fE-W1k"/>
</constraints> </constraints>
<size key="customSize" width="414" height="678"/> <size key="customSize" width="414" height="678"/>
<connections>
<outlet property="subT" destination="HR3-Fr-izE" id="ztb-Xt-6Rt"/>
</connections>
<point key="canvasLocation" x="62.595419847328245" y="241.5492957746479"/> <point key="canvasLocation" x="62.595419847328245" y="241.5492957746479"/>
</collectionViewCell> </collectionViewCell>
</objects> </objects>
......
...@@ -13,6 +13,7 @@ class NewGuideTwoCell: UICollectionViewCell { ...@@ -13,6 +13,7 @@ class NewGuideTwoCell: UICollectionViewCell {
@IBOutlet weak var photoL: UILabel! @IBOutlet weak var photoL: UILabel!
var sizeL:UILabel! var sizeL:UILabel!
@IBOutlet weak var subL: UILabel!
override func awakeFromNib() { override func awakeFromNib() {
super.awakeFromNib() super.awakeFromNib()
contentView.addSubview(animationView) contentView.addSubview(animationView)
...@@ -38,10 +39,26 @@ class NewGuideTwoCell: UICollectionViewCell { ...@@ -38,10 +39,26 @@ class NewGuideTwoCell: UICollectionViewCell {
} }
playAnimationWithDelay() playAnimationWithDelay()
addSubview(animationPhotoView)
animationPhotoView.snp.makeConstraints { make in
make.width.equalTo(357.RW())
make.height.equalTo(310.RW())
make.centerX.equalToSuperview()
make.top.equalTo(subL.snp.bottom).offset(0)
}
animationPhotoView.play()
} }
lazy var animationPhotoView : LottieAnimationView = {
let animationView = LottieAnimationView(name: "animation_guide_1")
animationView.loopMode = .loop
return animationView
}()
lazy var animationView:LottieAnimationView = { lazy var animationView:LottieAnimationView = {
let animationView = LottieAnimationView(name: "onboardingStorageLight") let animationView = LottieAnimationView(name: "onboardingStorageLight")
......
...@@ -59,7 +59,7 @@ ...@@ -59,7 +59,7 @@
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="img_icloud_start" translatesAutoresizingMaskIntoConstraints="NO" id="CQO-7f-Ijg"> <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="img_icloud_start" translatesAutoresizingMaskIntoConstraints="NO" id="CQO-7f-Ijg">
<rect key="frame" x="223.66666666666666" y="455.33333333333331" width="64.333333333333343" height="63.999999999999943"/> <rect key="frame" x="223.66666666666666" y="455.33333333333331" width="64.333333333333343" height="63.999999999999943"/>
</imageView> </imageView>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_zhaopian" translatesAutoresizingMaskIntoConstraints="NO" id="hqC-23-RfL"> <imageView hidden="YES" clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="icon_guide_zhaopian" translatesAutoresizingMaskIntoConstraints="NO" id="hqC-23-RfL">
<rect key="frame" x="24.666666666666657" y="110.33333333333331" width="357" height="365"/> <rect key="frame" x="24.666666666666657" y="110.33333333333331" width="357" height="365"/>
<constraints> <constraints>
<constraint firstAttribute="height" constant="365" id="7gF-YC-st8" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/> <constraint firstAttribute="height" constant="365" id="7gF-YC-st8" customClass="ScreenHeightRatioConstraint" customModule="PhoneManager" customModuleProvider="target"/>
...@@ -92,6 +92,7 @@ ...@@ -92,6 +92,7 @@
<size key="customSize" width="330" height="731"/> <size key="customSize" width="330" height="731"/>
<connections> <connections>
<outlet property="photoL" destination="FB0-xV-7Fl" id="9dX-pb-iYV"/> <outlet property="photoL" destination="FB0-xV-7Fl" id="9dX-pb-iYV"/>
<outlet property="subL" destination="zTf-gE-tij" id="ao7-5z-jU1"/>
</connections> </connections>
<point key="canvasLocation" x="358.77862595419845" y="269.36619718309862"/> <point key="canvasLocation" x="358.77862595419845" y="269.36619718309862"/>
</collectionViewCell> </collectionViewCell>
......
...@@ -16,10 +16,10 @@ class HomePayView:UIView { ...@@ -16,10 +16,10 @@ class HomePayView:UIView {
private var closeBtn:UIButton? private var closeBtn:UIButton?
private var titleLabel1:UILabel? private var titleLabel1:UILabel?
private var titleLabel2:UILabel? private var titleLabel2:UILabel?
private var photoImage:UIImageView! // private var photoImage:UIImageView!
private var icloudImage:UIImageView! // private var icloudImage:UIImageView!
private var photoLabel:UILabel? // private var photoLabel:UILabel?
private var icloudLabel:UILabel? // private var icloudLabel:UILabel?
// private var animationLabel:UILabel? // private var animationLabel:UILabel?
private var contentView2:UIView? private var contentView2:UIView?
private var contentView2Title:UILabel? private var contentView2Title:UILabel?
...@@ -41,7 +41,7 @@ class HomePayView:UIView { ...@@ -41,7 +41,7 @@ class HomePayView:UIView {
private var trailTitle:UILabel! private var trailTitle:UILabel!
private var tipsView:UIView! private var tipsView:UIView!
private var memSize:UILabel! private var memSize:UILabel!
var sizeLabel:UILabel! // var sizeLabel:UILabel!
var type = 0 { var type = 0 {
didSet { didSet {
...@@ -100,12 +100,12 @@ class HomePayView:UIView { ...@@ -100,12 +100,12 @@ class HomePayView:UIView {
lazy var animationView:LottieAnimationView = { lazy var animationView:LottieAnimationView = {
let animationView = LottieAnimationView(name: "onboardingStorageLight") let animationView = LottieAnimationView(name: "iapAnimation")
animationView.loopMode = .loop
animationView.animationSpeed = -1.0 // animationView.animationSpeed = -1.0
animationView.layer.cornerRadius = 8 // animationView.layer.cornerRadius = 8
animationView.contentMode = .scaleAspectFill // animationView.contentMode = .scaleAspectFill
animationView.clipsToBounds = true // animationView.clipsToBounds = true
return animationView return animationView
}() }()
...@@ -161,90 +161,93 @@ class HomePayView:UIView { ...@@ -161,90 +161,93 @@ class HomePayView:UIView {
} }
titleLabel2?.sizeToFit() titleLabel2?.sizeToFit()
photoImage = UIImageView(image: UIImage(named: "img_photos_start"))
self.addSubview(photoImage)
photoImage.snp.makeConstraints { make in // photoImage = UIImageView(image: UIImage(named: "img_photos_start"))
make.top.equalTo(titleLabel2!.snp.bottom).offset(38.RH()) // self.addSubview(photoImage)
make.left.equalTo(48.RW()) //
make.size.equalTo(64.RW()) // photoImage.snp.makeConstraints { make in
} // make.top.equalTo(titleLabel2!.snp.bottom).offset(38.RH())
// make.left.equalTo(48.RW())
icloudImage = UIImageView(image: UIImage(named: "img_file_guide")) // make.size.equalTo(64.RW())
self.addSubview(icloudImage) // }
//
icloudImage.snp.makeConstraints { make in // icloudImage = UIImageView(image: UIImage(named: "img_file_guide"))
make.left.equalTo(photoImage.snp.right).offset(24.RW()) // self.addSubview(icloudImage)
make.centerY.equalTo(photoImage) //
make.size.equalTo(64.RW()) // icloudImage.snp.makeConstraints { make in
} // make.left.equalTo(photoImage.snp.right).offset(24.RW())
// make.centerY.equalTo(photoImage)
photoLabel = UILabel() // make.size.equalTo(64.RW())
photoLabel?.text = "403" // }
photoLabel?.backgroundColor = UIColor.colorWithHex(hexStr: "#EB4545") //
photoLabel?.font = UIFont.scaledSystemFont(ofSize: 10, weight: .bold) // photoLabel = UILabel()
photoLabel?.textAlignment = .center // photoLabel?.text = "403"
photoLabel?.textColor = .white // photoLabel?.backgroundColor = UIColor.colorWithHex(hexStr: "#EB4545")
photoLabel?.layer.cornerRadius = 12.RW() // photoLabel?.font = UIFont.scaledSystemFont(ofSize: 10, weight: .bold)
photoLabel?.layer.masksToBounds = true // photoLabel?.textAlignment = .center
self.addSubview(photoLabel!) // photoLabel?.textColor = .white
// photoLabel?.layer.cornerRadius = 12.RW()
photoLabel?.snp.makeConstraints { make in // photoLabel?.layer.masksToBounds = true
make.top.equalTo(photoImage.snp.top).offset(-12) // self.addSubview(photoLabel!)
make.right.equalTo(photoImage.snp.right).offset(12) //
make.size.equalTo(24.RW()) // photoLabel?.snp.makeConstraints { make in
} // make.top.equalTo(photoImage.snp.top).offset(-12)
// make.right.equalTo(photoImage.snp.right).offset(12)
photoLabel?.sizeToFit() // make.size.equalTo(24.RW())
// }
icloudLabel = UILabel() //
icloudLabel?.text = "217" // photoLabel?.sizeToFit()
icloudLabel?.textAlignment = .center //
icloudLabel?.font = UIFont.scaledSystemFont(ofSize: 10, weight: .bold) // icloudLabel = UILabel()
icloudLabel?.backgroundColor = UIColor.colorWithHex(hexStr: "#EB4545") // icloudLabel?.text = "217"
icloudLabel?.textColor = .white // icloudLabel?.textAlignment = .center
icloudLabel?.layer.cornerRadius = 12.RW() // icloudLabel?.font = UIFont.scaledSystemFont(ofSize: 10, weight: .bold)
icloudLabel?.layer.masksToBounds = true // icloudLabel?.backgroundColor = UIColor.colorWithHex(hexStr: "#EB4545")
self.addSubview(icloudLabel!) // icloudLabel?.textColor = .white
// icloudLabel?.layer.cornerRadius = 12.RW()
icloudLabel?.snp.makeConstraints { make in // icloudLabel?.layer.masksToBounds = true
make.top.equalTo(icloudImage.snp.top).offset(-12) // self.addSubview(icloudLabel!)
make.right.equalTo(icloudImage.snp.right).offset(12) //
make.size.equalTo(24.RW()) // icloudLabel?.snp.makeConstraints { make in
} // make.top.equalTo(icloudImage.snp.top).offset(-12)
// make.right.equalTo(icloudImage.snp.right).offset(12)
icloudLabel?.sizeToFit() // make.size.equalTo(24.RW())
// }
//
// icloudLabel?.sizeToFit()
//
self.addSubview(animationView) self.addSubview(animationView)
playAnimationWithDelay() playAnimationWithDelay()
animationView.snp.makeConstraints { make in animationView.snp.makeConstraints { make in
make.top.equalTo(icloudImage!.snp.bottom).offset(24)
make.centerX.equalToSuperview() make.centerX.equalToSuperview()
make.width.equalTo(280) make.top.equalTo(titleLabel2!.snp.bottom).offset(40)
make.height.equalTo(12.RW()) make.width.equalTo(285.RW())
} make.height.equalTo(142.RW())
let analysis = UILabel()
analysis.text = "Analysis completed"
analysis.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3")
analysis.font = UIFont.scaledSystemFont(ofSize: 14, weight: .bold)
addSubview(analysis)
analysis.snp.makeConstraints { make in
make.left.equalTo(animationView.snp.left)
make.top.equalTo(animationView.snp.bottom).offset(8)
}
memSize = UILabel()
memSize.text = "100%"
memSize.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3")
memSize.font = UIFont.scaledSystemFont(ofSize: 14, weight: .bold)
addSubview(memSize)
memSize.snp.makeConstraints { make in
make.right.equalTo(animationView.snp.right)
make.top.equalTo(animationView.snp.bottom).offset(8)
} }
//
// let analysis = UILabel()
// analysis.text = "Analysis completed"
// analysis.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3")
// analysis.font = UIFont.scaledSystemFont(ofSize: 14, weight: .bold)
// addSubview(analysis)
// analysis.snp.makeConstraints { make in
// make.left.equalTo(animationView.snp.left)
// make.top.equalTo(animationView.snp.bottom).offset(8)
// }
//
// memSize = UILabel()
// memSize.text = "100%"
// memSize.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3")
// memSize.font = UIFont.scaledSystemFont(ofSize: 14, weight: .bold)
// addSubview(memSize)
// memSize.snp.makeConstraints { make in
// make.right.equalTo(animationView.snp.right)
// make.top.equalTo(animationView.snp.bottom).offset(8)
// }
appleLabel = UILabel() appleLabel = UILabel()
...@@ -328,7 +331,7 @@ class HomePayView:UIView { ...@@ -328,7 +331,7 @@ class HomePayView:UIView {
privavye_Label.snp.makeConstraints { make in privavye_Label.snp.makeConstraints { make in
make.left.right.equalTo(self.contentView2!) make.left.right.equalTo(self.contentView2!)
make.top.equalTo(ppBtn.snp.bottom).offset(15) make.top.equalTo(ppBtn.snp.bottom).offset(15.RH())
make.bottom.equalToSuperview().offset(-10) make.bottom.equalToSuperview().offset(-10)
} }
self.layoutIfNeeded() self.layoutIfNeeded()
...@@ -336,18 +339,64 @@ class HomePayView:UIView { ...@@ -336,18 +339,64 @@ class HomePayView:UIView {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.01) { DispatchQueue.main.asyncAfter(deadline: .now() + 0.01) {
self.callBack(CommonPush.change) self.callBack(CommonPush.change)
} }
// if let url = Bundle.main.url(forResource: "iap", withExtension: "gif"),
// let data = try? Data(contentsOf: url) {
// let animatedImage = FLAnimatedImage(animatedGIFData: data)
// let imageView = FLAnimatedImageView()
// imageView.animatedImage = animatedImage
// imageView.startAnimating()
// addSubview(imageView)
//
// imageView.snp.makeConstraints { make in
// make.centerX.equalToSuperview()
// make.top.equalTo(titleLabel2!.snp.bottom).offset(40)
// make.width.equalTo(285.RW())
// make.height.equalTo(142.RW())
// }
// }
// let gifImageView = UIImageView()
// gifImageView.contentMode = .scaleAspectFit
// addSubview(gifImageView)
// gifImageView.snp.makeConstraints { make in
// make.centerX.equalToSuperview()
// make.top.equalTo(titleLabel2!.snp.bottom).offset(40)
// make.width.equalTo(285.RW())
// make.height.equalTo(142.RW())
// }
//
// // 从 Bundle 加载 GIF 文件
// guard let gifImage = try? UIImage(gifName: "iap.gif") else {
// print("Failed to load GIF")
// return
// }
//
// gifImageView.setGifImage(gifImage, loopCount: -1)
// gifImageView.startAnimatingGif()
// gifImageView.startAnimatingGif()
// // 设置 GIF 到 ImageView 并开始播放
// do {
// try gifImageView.setGifImage(gifImage, loopCount: 0) // 0 表示无限循环
// } catch {
// print("Error setting GIF: \(error)")
// }
} }
func playAnimationWithDelay() { func playAnimationWithDelay() {
// 播放动画 // 播放动画
animationView.play(fromProgress: 0.4, toProgress: 1, loopMode: .playOnce) {[weak self] finished in animationView.play()
if finished { // animationView.play(fromProgress: 0.4, toProgress: 1, loopMode: .playOnce) {[weak self] finished in
// 动画播放完成后,延迟 2 秒再重新播放 // if finished {
DispatchQueue.main.asyncAfter(deadline: .now() + 2) { // // 动画播放完成后,延迟 2 秒再重新播放
self?.playAnimationWithDelay() // DispatchQueue.main.asyncAfter(deadline: .now() + 2) {
} // self?.playAnimationWithDelay()
} // }
} // }
// }
} }
lazy var privavye_Label: UILabel = { lazy var privavye_Label: UILabel = {
...@@ -499,48 +548,48 @@ class HomePayView:UIView { ...@@ -499,48 +548,48 @@ class HomePayView:UIView {
} }
tipsView.snp.makeConstraints { make in tipsView.snp.makeConstraints { make in
make.top.equalTo(animationView.snp.bottom).offset(92.RH()) make.top.equalTo(titleLabel2!.snp.bottom).offset(230.RH())
make.left.right.equalToSuperview().inset(12) make.left.right.equalToSuperview().inset(12)
make.height.equalTo(110.RH()) make.height.equalTo(110.RH())
} }
let freespace = UILabel() // let freespace = UILabel()
freespace.font = UIFont.scaledSystemFont(ofSize: 12, weight: .bold) // freespace.font = UIFont.scaledSystemFont(ofSize: 12, weight: .bold)
freespace.text = "FREE SPACE" // freespace.text = "FREE SPACE"
freespace.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3") // freespace.textColor = UIColor.colorWithHex(hexStr: "#B3B3B3")
addSubview(freespace) // addSubview(freespace)
//
sizeLabel = UILabel() // sizeLabel = UILabel()
sizeLabel.text = "54.2 GB" // sizeLabel.text = "54.2 GB"
sizeLabel.textColor = UIColor.colorWithHex(hexStr: black3Color) // sizeLabel.textColor = UIColor.colorWithHex(hexStr: black3Color)
sizeLabel.font = UIFont.scaledSystemFont(ofSize: 12, weight: .bold) // sizeLabel.font = UIFont.scaledSystemFont(ofSize: 12, weight: .bold)
addSubview(sizeLabel) // addSubview(sizeLabel)
//
freespace.snp.makeConstraints { make in // freespace.snp.makeConstraints { make in
make.left.equalTo(icloudImage!.snp.right).offset(24.RW()) // make.left.equalTo(icloudImage!.snp.right).offset(24.RW())
make.top.equalTo(titleLabel2!.snp.bottom).offset(42) // make.top.equalTo(titleLabel2!.snp.bottom).offset(42)
make.height.equalTo(17.RW()) // make.height.equalTo(17.RW())
} // }
//
sizeLabel.snp.makeConstraints { make in // sizeLabel.snp.makeConstraints { make in
make.top.equalTo(freespace.snp.bottom).offset(0) // make.top.equalTo(freespace.snp.bottom).offset(0)
make.left.equalTo(icloudImage!.snp.right).offset(24) // make.left.equalTo(icloudImage!.snp.right).offset(24)
make.height.equalTo(39.RW()) // make.height.equalTo(39.RW())
} // }
//
let fullText = FileTool().formatBytes(FileTool().getStorageInfo(for: .free) ?? 0 ) // let fullText = FileTool().formatBytes(FileTool().getStorageInfo(for: .free) ?? 0 )
// 创建一个可变的富文本字符串 // // 创建一个可变的富文本字符串
let attributedString = NSMutableAttributedString(string: fullText) // let attributedString = NSMutableAttributedString(string: fullText)
// 设置前半部分(动态数值)的范围 // // 设置前半部分(动态数值)的范围
let regularFont = UIFont.scaledSystemFont(ofSize: 28, weight: .bold) // 设置常规字体大小 // let regularFont = UIFont.scaledSystemFont(ofSize: 28, weight: .bold) // 设置常规字体大小
attributedString.addAttribute(.font, value: regularFont, range: NSRange.init(location: 0,length:fullText.count-2)) // attributedString.addAttribute(.font, value: regularFont, range: NSRange.init(location: 0,length:fullText.count-2))
//
// 设置“GB”部分的范围并将字体大小设置为12 // // 设置“GB”部分的范围并将字体大小设置为12
let smallFont = UIFont.scaledSystemFont(ofSize: 12, weight: .bold) // 设置字体大小为12 // let smallFont = UIFont.scaledSystemFont(ofSize: 12, weight: .bold) // 设置字体大小为12
attributedString.addAttribute(.font, value: smallFont, range:NSRange.init(location: fullText.count-2, length: 2)) // attributedString.addAttribute(.font, value: smallFont, range:NSRange.init(location: fullText.count-2, length: 2))
//
// 将富文本赋值给UILabel // // 将富文本赋值给UILabel
sizeLabel.attributedText = attributedString // sizeLabel.attributedText = attributedString
payDueView.snp.makeConstraints { make in payDueView.snp.makeConstraints { make in
......
...@@ -36,7 +36,7 @@ class HomePayViewController:UIViewController { ...@@ -36,7 +36,7 @@ class HomePayViewController:UIViewController {
override func viewDidAppear(_ animated: Bool) { override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated) super.viewDidAppear(animated)
homePayView?.playAnimationWithDelay() // homePayView?.playAnimationWithDelay()
} }
override func viewDidDisappear(_ animated: Bool) { override func viewDidDisappear(_ animated: Bool) {
super.viewDidDisappear(animated) super.viewDidDisappear(animated)
......
//
// TrashViewController.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import UIKit
/// Horizontally paged "trash" screen: four TrashContenView pages (video, other,
/// shot, chat) inside a paging scroll view, with a Delete button and a close
/// button overlaid on top.
class TrashViewController: UIViewController {
// Page order of the four trash categories in the pager.
var source:[TrashTypeEnum] = [.video,.other,.shot,.chat]
// Height available to the pager; computed in viewWillLayoutSubviews.
var contentH:CGFloat = 0
var contentScrollView:UIScrollView!
var delBtn:UIButton!
// NOTE(review): set but never read in this file — candidate for removal.
var currentType:TrashTypeEnum = .other
let pageCount = 4 // total number of pages
// 1-based index of the currently visible page (updated by the scroll delegate).
var currentPage = 1
override func viewDidLoad() {
super.viewDidLoad()
configUI()
}
/// Builds the view hierarchy: pager, Delete button, close button, and the four
/// category pages. Frames are finalized later in viewWillLayoutSubviews.
func configUI(){
view.backgroundColor = .white
// contentH is still 0 here; the real frame is set in viewWillLayoutSubviews.
contentScrollView = UIScrollView(frame: CGRect(x: 0, y: 0, width: ScreenW, height: contentH))
contentScrollView.contentSize = CGSize(width: ScreenW*4, height: view.height)
contentScrollView.isPagingEnabled = true
contentScrollView.showsHorizontalScrollIndicator = false
contentScrollView.showsVerticalScrollIndicator = false
contentScrollView.bounces = false
contentScrollView.delegate = self
contentScrollView.backgroundColor = UIColor.white
view.addSubview(contentScrollView)
// Bottom "Delete" action button, pinned above the bottom edge.
delBtn = UIButton()
delBtn.setTitle("Delete", for: .normal)
delBtn.setTitleColor(.white, for: .normal)
delBtn.titleLabel?.font = UIFont.systemFont(ofSize: 16, weight: .semibold)
delBtn.backgroundColor = UIColor.colorWithHex(hexStr: "#0082FF")
delBtn.layer.cornerRadius = 10
view.addSubview(delBtn)
delBtn.snp.makeConstraints { make in
make.bottom.equalTo(-17)
make.left.right.equalToSuperview().inset(55)
make.height.equalTo(40)
}
// Close button in the top-left corner.
// NOTE(review): no action/target is attached to closeBtn — tapping it does nothing.
let closeBtn = UIButton()
closeBtn.setImage(UIImage.init(named: "icon_close_bottom"), for: .normal)
view.addSubview(closeBtn)
closeBtn.snp.makeConstraints { make in
make.left.equalTo(16)
make.top.equalTo(12)
make.size.equalTo(18)
}
// One page per trash category, positioned in viewWillLayoutSubviews.
contentScrollView.addSubview(videoView)
contentScrollView.addSubview(otherView)
contentScrollView.addSubview(shotView)
contentScrollView.addSubview(chatView)
}
/// Lays the four pages out side by side once real view bounds are known.
override func viewWillLayoutSubviews() {
super.viewWillLayoutSubviews()
let viewWidth = self.view.bounds.width
let viewHeight = self.view.bounds.height
// 108pt reserved: 34pt top inset plus the bottom button area.
contentH = viewHeight - 108
contentScrollView.frame = CGRect(x: 0, y: 34, width: viewWidth, height: contentH)
contentScrollView.contentSize = CGSize(width: viewWidth*4, height: contentH)
videoView.frame = CGRect(x: 0, y: 0, width: viewWidth, height: contentH)
otherView.frame = CGRect(x: viewWidth, y: 0, width: viewWidth, height: contentH)
shotView.frame = CGRect(x: viewWidth*2, y: 0, width: viewWidth, height: contentH)
chatView.frame = CGRect(x: viewWidth*3, y: 0, width: viewWidth, height: contentH)
}
// Lazily created category pages; each TrashContenView filters by its trashType.
lazy var videoView:TrashContenView = {
let videoView = TrashContenView()
videoView.trashType = .video
return videoView
}()
lazy var otherView:TrashContenView = {
let otherView = TrashContenView()
otherView.trashType = .other
return otherView
}()
lazy var shotView:TrashContenView = {
let shotView = TrashContenView()
shotView.trashType = .shot
return shotView
}()
lazy var chatView:TrashContenView = {
let chatView = TrashContenView()
chatView.trashType = .chat
return chatView
}()
//
// lazy var collectionView:UICollectionView = {
// let layout = UICollectionViewFlowLayout()
// layout.itemSize = CGSize(width: view.width, height: view.height)
// layout.scrollDirection = .horizontal
// layout.minimumInteritemSpacing = 0
// layout.minimumLineSpacing = 0
// let collectionView = UICollectionView(frame: CGRect(x: 0, y: 0, width: view.width, height: view.height), collectionViewLayout:layout)
// collectionView.isPagingEnabled = true
// collectionView.delegate = self
// collectionView.backgroundColor = .white
// collectionView.dataSource = self
// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell0")
// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell1")
// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell2")
// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell3")
//
// return collectionView
// }()
}
//extension TrashViewController:UICollectionViewDelegate,UICollectionViewDataSource{
//
// func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
// return source.count
// }
//
// func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
// let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenViewCell\(indexPath.row)", for: indexPath) as! TrashContenViewCell
// cell.trashType = source[indexPath.row]
// return cell
// }
//
//}
// MARK: - UIScrollViewDelegate
extension TrashViewController: UIScrollViewDelegate {
    /// Broadcasts the pager's horizontal offset so the page-title cells can
    /// animate their indicator line in sync.
    /// NOTE(review): the observer in TrashContenTitleCell casts notif.object to
    /// CGFloat, but a TrashPageScrollModel is posted here — confirm which side
    /// is correct.
    func scrollViewDidScroll(_ scrollView: UIScrollView) {
        let horizontalOffset = scrollView.contentOffset.x
        print("滑动距离", horizontalOffset)
        let payload = TrashPageScrollModel.init(offset: horizontalOffset, page: currentPage)
        NotificationCenter.default.post(name: .trashPageScroll, object: payload)
    }

    // Called when a user-initiated drag has fully decelerated to a stop.
    func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
        syncCurrentPage(with: scrollView)
    }

    // Called when a programmatic scroll (setContentOffset etc.) finishes animating.
    func scrollViewDidEndScrollingAnimation(_ scrollView: UIScrollView) {
        syncCurrentPage(with: scrollView)
    }

    /// Derives the 1-based page index from the offset and stores it.
    private func syncCurrentPage(with scrollView: UIScrollView) {
        let pageIndex = Int(scrollView.contentOffset.x / scrollView.bounds.width)
        print("当前页: \(pageIndex + 1)/\(pageCount)")
        currentPage = pageIndex + 1
    }
}
//
// TrashUIModel.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import Foundation
/// The four trash categories shown by the pager, in display order.
enum TrashTypeEnum {
    case video, other, shot, chat

    /// Stable integer stored in the trash database for this category.
    var dbType: Int {
        switch self {
        case .video: return 0
        case .other: return 1
        case .shot: return 2
        case .chat: return 3
        }
    }
}
/// Payload posted with the `.trashPageScroll` notification while the pager moves.
struct TrashPageScrollModel {
    /// Current horizontal content offset of the pager, in points.
    var offset: CGFloat = 0
    /// 1-based index of the page that was current when scrolling started.
    var page: Int = 1
}
//
// TrashContenAssetCell.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import UIKit
/// Thumbnail cell for a single trashed asset with an overlaid "remove" button.
/// Rendering of the asset thumbnail and the remove action are not implemented yet.
class TrashContenAssetCell: UICollectionViewCell {
    /// Thumbnail image view wired up in the nib.
    @IBOutlet weak var assetImage: UIImageView!

    override func awakeFromNib() {
        super.awakeFromNib()
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Re-apply the rounded-corner mask every layout pass so it tracks size changes.
        assetImage.cornerCut(radius: 8, corner: .allCorners)
    }

    /// Asset backing this cell.
    /// Fix: the original didSet held an unused `guard let model` binding (dead
    /// code producing a compiler warning) and a stray `// assetImage.im` stub;
    /// replaced with an explicit TODO no-op — behavior is unchanged.
    var model: AssetModel? {
        didSet {
            // TODO: load the thumbnail for `model` into `assetImage`.
        }
    }

    @IBAction func removeClick(_ sender: Any) {
        // TODO: handle removal of this asset from the trash.
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="23504" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_12" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="23506"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<collectionViewCell opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" id="gTV-IL-0wX" customClass="TrashContenAssetCell" customModule="PhoneManager" customModuleProvider="target">
<rect key="frame" x="0.0" y="0.0" width="138" height="145"/>
<autoresizingMask key="autoresizingMask"/>
<view key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center">
<rect key="frame" x="0.0" y="0.0" width="138" height="145"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<subviews>
<imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" translatesAutoresizingMaskIntoConstraints="NO" id="eLJ-qj-i27">
<rect key="frame" x="0.0" y="0.0" width="138" height="145"/>
<color key="backgroundColor" red="0.50196078430000002" green="0.50196078430000002" blue="0.50196078430000002" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</imageView>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="u7L-fz-1My">
<rect key="frame" x="110" y="0.0" width="28" height="34"/>
<constraints>
<constraint firstAttribute="width" constant="28" id="8iF-C1-mzq"/>
<constraint firstAttribute="height" constant="34" id="Yde-4P-yCb"/>
</constraints>
<inset key="imageEdgeInsets" minX="0.0" minY="0.0" maxX="2.2250738585072014e-308" maxY="0.0"/>
<state key="normal" image="icon_trash_remove"/>
<connections>
<action selector="removeClick:" destination="gTV-IL-0wX" eventType="touchUpInside" id="Kdg-c7-RsH"/>
</connections>
</button>
</subviews>
</view>
<viewLayoutGuide key="safeArea" id="ZTg-uK-7eu"/>
<constraints>
<constraint firstAttribute="trailing" secondItem="eLJ-qj-i27" secondAttribute="trailing" id="0rT-cN-YvO"/>
<constraint firstItem="u7L-fz-1My" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" id="Ma5-yx-gre"/>
<constraint firstItem="eLJ-qj-i27" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" id="Qwo-N1-6WM"/>
<constraint firstAttribute="trailing" secondItem="u7L-fz-1My" secondAttribute="trailing" id="Toq-Ra-cSL"/>
<constraint firstAttribute="bottom" secondItem="eLJ-qj-i27" secondAttribute="bottom" id="Yz5-xE-glP"/>
<constraint firstItem="eLJ-qj-i27" firstAttribute="leading" secondItem="gTV-IL-0wX" secondAttribute="leading" id="mnv-e0-PKX"/>
</constraints>
<size key="customSize" width="138" height="145"/>
<connections>
<outlet property="assetImage" destination="eLJ-qj-i27" id="KYz-cL-qDY"/>
</connections>
<point key="canvasLocation" x="155.72519083969465" y="53.169014084507047"/>
</collectionViewCell>
</objects>
<resources>
<image name="icon_trash_remove" width="16" height="16"/>
</resources>
</document>
//
// TrashContenTitleCell.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import UIKit
class TrashContenTitleCell: UICollectionViewCell {

    // MARK: - Outlets

    /// Section title label ("Trash Can · Video" etc.), wired in the XIB.
    @IBOutlet weak var title: UILabel!
    /// Secondary label (item count / total size), wired in the XIB.
    @IBOutlet weak var contentL: UILabel!

    // MARK: - Tab indicator

    var lineOne: UIView!
    var lineTwo: UIView!
    var lineThree: UIView!
    var lineFour: UIView!
    /// Highlighted indicator that slides between the four segment views.
    var scrollLine: UIView!

    /// Width of one indicator segment: screen width minus margins, split four ways.
    let lineW: CGFloat = (ScreenW - 62) / 4.0
    let selectColor: UIColor = UIColor.colorWithHex(hexStr: "#0082FF")
    let normalColor: UIColor = UIColor.colorWithHex(hexStr: "#E5E5E5")

    /// Token for the block-based notification observer. Block-based observers
    /// are NOT unregistered automatically, so we must keep the token and remove
    /// it in deinit; otherwise every cell instance leaks its observation.
    private var scrollObserver: NSObjectProtocol?

    /// Currently selected tab; snaps the indicator to the matching segment.
    var trashType: TrashTypeEnum = .video {
        didSet {
            switch trashType {
            case .video:
                scrollLine.frame = lineOne.frame
            case .other:
                scrollLine.frame = lineTwo.frame
            case .shot:
                scrollLine.frame = lineThree.frame
            case .chat:
                scrollLine.frame = lineFour.frame
            }
        }
    }

    deinit {
        // Fix: without this, the block-based observer registered in
        // awakeFromNib stays alive for the lifetime of the process.
        if let observer = scrollObserver {
            NotificationCenter.default.removeObserver(observer)
        }
    }

    override func awakeFromNib() {
        super.awakeFromNib()
        // Build the four grey segment backgrounds laid out left to right.
        lineOne = UIView()
        lineOne.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        addSubview(lineOne)
        lineTwo = UIView()
        lineTwo.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        addSubview(lineTwo)
        lineThree = UIView()
        lineThree.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        addSubview(lineThree)
        lineFour = UIView()
        lineFour.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        addSubview(lineFour)
        lineOne.frame = CGRect(x: 16, y: 62, width: lineW, height: 6)
        lineTwo.frame = CGRect(x: 10 + lineOne.rightX, y: 62, width: lineW, height: 6)
        lineThree.frame = CGRect(x: 10 + lineTwo.rightX, y: 62, width: lineW, height: 6)
        lineFour.frame = CGRect(x: 10 + lineThree.rightX, y: 62, width: lineW, height: 6)
        // The blue moving indicator starts over the first segment.
        scrollLine = UIView()
        scrollLine.backgroundColor = UIColor.colorWithHex(hexStr: "#0082FF")
        scrollLine.frame = lineOne.frame
        addSubview(scrollLine)
        scrollLine.cornerCut(radius: 2, corner: .allCorners)
        // NOTE(review): TrashContenView observes the same .trashPageScroll
        // notification but casts the payload to TrashPageScrollModel, while this
        // cell expects a bare CGFloat. Only one payload type can actually be
        // posted, so one of the two observers never fires — confirm which is
        // current (this cell's registration is commented out in TrashContenView).
        scrollObserver = NotificationCenter.default.addObserver(forName: .trashPageScroll, object: nil, queue: nil) { [weak self] notif in
            guard let weakSelf = self else { return }
            guard let offset = notif.object as? CGFloat else {
                return
            }
            // Move the indicator one segment forward/back depending on which
            // tab is current and how far the pager has scrolled.
            switch weakSelf.trashType {
            case .video:
                if offset > 0 {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                    }
                } else {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineOne.frame
                    }
                }
            case .other:
                if offset > ScreenW {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                    }
                } else if offset == ScreenW {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                    }
                } else {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineOne.frame
                    }
                }
            case .shot:
                if offset > ScreenW * 2 {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineFour.frame
                    }
                } else if offset == ScreenW * 2 {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                    }
                } else {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                    }
                }
            case .chat:
                if offset < ScreenW * 3 {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                    }
                } else {
                    UIView.animate(withDuration: 0.2) {
                        weakSelf.scrollLine.frame = weakSelf.lineFour.frame
                    }
                }
            }
        }
    }

    // func reset(){
    //     lineOne.backgroundColor = trashType.normalColor
    //     lineTwo.backgroundColor = trashType.normalColor
    //     lineThree.backgroundColor = trashType.normalColor
    //     lineFour.backgroundColor = trashType.normalColor
    // }
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="23504" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_12" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="23506"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<collectionViewCell opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" id="gTV-IL-0wX" customClass="TrashContenTitleCell" customModule="PhoneManager" customModuleProvider="target">
<rect key="frame" x="0.0" y="0.0" width="367" height="96"/>
<autoresizingMask key="autoresizingMask"/>
<view key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center">
<rect key="frame" x="0.0" y="0.0" width="367" height="96"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<subviews>
<view contentMode="scaleToFill" translatesAutoresizingMaskIntoConstraints="NO" id="2ql-I6-tLt">
<rect key="frame" x="0.0" y="0.0" width="367" height="84"/>
<subviews>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="Trash Can · Video" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="8Ks-xQ-36T">
<rect key="frame" x="16" y="0.0" width="162" height="24"/>
<fontDescription key="fontDescription" type="system" weight="medium" pointSize="20"/>
<color key="textColor" white="0.0" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<nil key="highlightedColor"/>
</label>
<label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="1 video · 156.9MB" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="ded-ac-qdM">
<rect key="frame" x="16" y="28" width="133.66666666666666" height="22"/>
<constraints>
<constraint firstAttribute="height" constant="22" id="vHf-wv-0RL"/>
</constraints>
<fontDescription key="fontDescription" type="system" weight="medium" pointSize="16"/>
<color key="textColor" red="0.40000000000000002" green="0.40000000000000002" blue="0.40000000000000002" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<nil key="highlightedColor"/>
</label>
</subviews>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
<constraints>
<constraint firstItem="8Ks-xQ-36T" firstAttribute="leading" secondItem="2ql-I6-tLt" secondAttribute="leading" constant="16" id="D5d-JF-WWs"/>
<constraint firstItem="8Ks-xQ-36T" firstAttribute="top" secondItem="2ql-I6-tLt" secondAttribute="top" id="Ost-JT-11u"/>
<constraint firstItem="ded-ac-qdM" firstAttribute="top" secondItem="8Ks-xQ-36T" secondAttribute="bottom" constant="4" id="RUR-iU-J2f"/>
<constraint firstItem="ded-ac-qdM" firstAttribute="leading" secondItem="2ql-I6-tLt" secondAttribute="leading" constant="16" id="rpq-9I-gZF"/>
</constraints>
</view>
</subviews>
</view>
<viewLayoutGuide key="safeArea" id="ZTg-uK-7eu"/>
<color key="backgroundColor" red="0.94901960784313721" green="0.96470588235294119" blue="0.9882352941176471" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstItem="2ql-I6-tLt" firstAttribute="leading" secondItem="gTV-IL-0wX" secondAttribute="leading" id="VTc-X8-1c5"/>
<constraint firstAttribute="trailing" secondItem="2ql-I6-tLt" secondAttribute="trailing" id="fR2-bt-dJM"/>
<constraint firstAttribute="bottom" secondItem="2ql-I6-tLt" secondAttribute="bottom" constant="12" id="huu-EZ-MHP"/>
<constraint firstItem="2ql-I6-tLt" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" id="lgE-St-BlS"/>
</constraints>
<size key="customSize" width="367" height="96"/>
<connections>
<outlet property="contentL" destination="ded-ac-qdM" id="il0-p1-cJv"/>
<outlet property="title" destination="8Ks-xQ-36T" id="Zql-BY-4sx"/>
</connections>
<point key="canvasLocation" x="330.53435114503816" y="35.91549295774648"/>
</collectionViewCell>
</objects>
<resources>
<systemColor name="systemBackgroundColor">
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</systemColor>
</resources>
</document>
//
// TrashContenView.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import UIKit
class TrashContenView: UIView {

    // MARK: - Subviews

    var collectionView: UICollectionView!
    var typeLabel: UILabel!
    var sizeLabel: UILabel!
    var lineOne: UIView!
    var lineTwo: UIView!
    var lineThree: UIView!
    var lineFour: UIView!
    /// Highlighted indicator that slides between the four segment views.
    var scrollLine: UIView!

    /// Width of one indicator segment: screen width minus margins, split four ways.
    let lineW: CGFloat = (ScreenW - 62) / 4.0

    /// Trashed assets shown in the grid.
    var dataSource: [AssetModel] = []

    /// Token for the block-based notification observer. Block-based observers
    /// are NOT unregistered automatically, so we must keep the token and remove
    /// it in deinit; otherwise every view instance leaks its observation.
    private var scrollObserver: NSObjectProtocol?

    override init(frame: CGRect) {
        super.init(frame: frame)
        backgroundColor = .white
        configUI()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        // Fix: without this, the observer registered in configUI stays
        // alive for the lifetime of the process.
        if let observer = scrollObserver {
            NotificationCenter.default.removeObserver(observer)
        }
    }

    /// Refreshes the grid from storage. The database query is still stubbed
    /// out — TODO(review): wire up TrashDatabase before shipping.
    func getData() {
        // dataSource = TrashDatabase.shared.queryByMediaType(trashType.dbType).compactMap({ (localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int) in
        //     return AssetModel.init(localIdentifier: localIdentifier, assetSize: assetSize, createDate: createDate)
        // })
        collectionView.reloadData()
    }

    /// Builds the header (labels + tab indicator) and the asset grid, then
    /// subscribes to pager-scroll notifications to animate the indicator.
    func configUI() {
        let layout = UICollectionViewFlowLayout()
        layout.minimumInteritemSpacing = 12
        layout.minimumLineSpacing = 12
        layout.sectionInset = UIEdgeInsets(top: 20, left: 16, bottom: 0, right: 16)
        layout.itemSize = CGSize(width: (ScreenW - 56) / 3, height: (ScreenW - 56) / 3)
        collectionView = UICollectionView.init(frame: CGRect.zero, collectionViewLayout: layout)
        collectionView.delegate = self
        collectionView.backgroundColor = UIColor.colorWithHex(hexStr: "#F2F6FC")
        collectionView.dataSource = self
        collectionView.register(UINib(nibName: "TrashContenAssetCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenAssetCell")
        addSubview(collectionView)
        let topView = UIView()
        topView.backgroundColor = .white
        addSubview(topView)
        topView.snp.makeConstraints { make in
            make.top.left.right.equalToSuperview()
            make.height.equalTo(82)
        }
        typeLabel = UILabel()
        typeLabel.text = "Trash Can · Video"
        typeLabel.textColor = .black
        typeLabel.font = UIFont.systemFont(ofSize: 20, weight: .medium)
        topView.addSubview(typeLabel)
        sizeLabel = UILabel()
        sizeLabel.text = "1 video · 156.9MB"
        sizeLabel.textColor = UIColor.colorWithHex(hexStr: "#666666")
        sizeLabel.font = UIFont.systemFont(ofSize: 16, weight: .medium)
        topView.addSubview(sizeLabel)
        typeLabel.snp.makeConstraints { make in
            make.left.equalTo(16)
            make.top.equalTo(0)
        }
        sizeLabel.snp.makeConstraints { make in
            make.left.equalTo(16)
            make.top.equalTo(30)
        }
        collectionView.snp.makeConstraints { make in
            make.left.bottom.right.equalToSuperview()
            make.top.equalTo(sizeLabel.snp.bottom).offset(32)
        }
        // Four grey segment backgrounds laid out left to right, plus the
        // blue moving indicator starting over the first segment.
        lineOne = UIView()
        lineOne.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        topView.addSubview(lineOne)
        lineTwo = UIView()
        lineTwo.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        topView.addSubview(lineTwo)
        lineThree = UIView()
        lineThree.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        topView.addSubview(lineThree)
        lineFour = UIView()
        lineFour.backgroundColor = UIColor.colorWithHex(hexStr: "#E5E5E5")
        topView.addSubview(lineFour)
        lineOne.frame = CGRect(x: 16, y: 62, width: lineW, height: 6)
        lineTwo.frame = CGRect(x: 10 + lineOne.rightX, y: 62, width: lineW, height: 6)
        lineThree.frame = CGRect(x: 10 + lineTwo.rightX, y: 62, width: lineW, height: 6)
        lineFour.frame = CGRect(x: 10 + lineThree.rightX, y: 62, width: lineW, height: 6)
        scrollLine = UIView()
        scrollLine.backgroundColor = UIColor.colorWithHex(hexStr: "#0082FF")
        scrollLine.frame = lineOne.frame
        topView.addSubview(scrollLine)
        scrollObserver = NotificationCenter.default.addObserver(forName: .trashPageScroll, object: nil, queue: nil) { [weak self] notif in
            guard let weakSelf = self else { return }
            guard let model = notif.object as? TrashPageScrollModel else {
                return
            }
            let offset = model.offset
            let page = model.page
            // Only animate while the pager is on the page adjacent to this
            // tab; otherwise snap straight to this tab's own segment.
            switch weakSelf.trashType {
            case .video:
                if page == 1 {
                    if offset > 0 {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                        }
                    } else {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineOne.frame
                        }
                    }
                } else {
                    weakSelf.scrollLine.frame = weakSelf.lineOne.frame
                }
            case .other:
                if page == 2 {
                    if offset > ScreenW {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                        }
                    } else if offset == ScreenW {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                        }
                    } else {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineOne.frame
                        }
                    }
                } else {
                    weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                }
            case .shot:
                if page == 3 {
                    if offset > ScreenW * 2 {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineFour.frame
                        }
                    } else if offset == ScreenW * 2 {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                        }
                    } else {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineTwo.frame
                        }
                    }
                } else {
                    weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                }
            case .chat:
                if page == 4 {
                    if offset < ScreenW * 3 {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineThree.frame
                        }
                    } else {
                        UIView.animate(withDuration: 0.2) {
                            weakSelf.scrollLine.frame = weakSelf.lineFour.frame
                        }
                    }
                } else {
                    weakSelf.scrollLine.frame = weakSelf.lineFour.frame
                }
            }
        }
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Rounded corners must be re-applied after layout changes the frames.
        scrollLine.cornerCut(radius: 2, corner: .allCorners)
        lineOne.cornerCut(radius: 2, corner: .allCorners)
        lineTwo.cornerCut(radius: 2, corner: .allCorners)
        lineThree.cornerCut(radius: 2, corner: .allCorners)
        lineFour.cornerCut(radius: 2, corner: .allCorners)
    }

    /// Current tab; snaps the indicator to the matching segment and reloads data.
    var trashType: TrashTypeEnum = .video {
        didSet {
            switch trashType {
            case .video:
                scrollLine.frame = lineOne.frame
            case .other:
                scrollLine.frame = lineTwo.frame
            case .shot:
                scrollLine.frame = lineThree.frame
            case .chat:
                scrollLine.frame = lineFour.frame
            }
            getData()
        }
    }
}
// MARK: - UICollectionViewDelegate / DataSource / FlowLayout
extension TrashContenView: UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {

    /// A single flat section containing all trashed assets.
    func numberOfSections(in collectionView: UICollectionView) -> Int {
        return 1
    }

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        return dataSource.count
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        let assetCell = collectionView.dequeueReusableCell(
            withReuseIdentifier: "TrashContenAssetCell",
            for: indexPath
        ) as! TrashContenAssetCell
        return assetCell
    }
}
//
// TrashContenViewCell.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import UIKit
class TrashContenViewCell: UICollectionViewCell {

    /// Hosts one page of trash content: a title row plus an asset grid.
    @IBOutlet weak var collectionView: UICollectionView!

    /// Which trash category this page shows; reloads the grid when switched.
    var trashType: TrashTypeEnum = .video {
        didSet {
            collectionView.reloadData()
        }
    }

    override func awakeFromNib() {
        super.awakeFromNib()
        configUI()
    }

    /// Swaps in a flow layout and registers the two nib-backed cell types.
    func configUI() {
        let flow = UICollectionViewFlowLayout()
        flow.minimumInteritemSpacing = 12
        flow.minimumLineSpacing = 12
        collectionView.setCollectionViewLayout(flow, animated: false)
        collectionView.delegate = self
        collectionView.dataSource = self
        for nibName in ["TrashContenAssetCell", "TrashContenTitleCell"] {
            collectionView.register(UINib(nibName: nibName, bundle: nil), forCellWithReuseIdentifier: nibName)
        }
    }
}
// MARK: - Collection view wiring (section 0 = title row, section 1 = asset grid)
extension TrashContenViewCell: UICollectionViewDelegate, UICollectionViewDataSource, UICollectionViewDelegateFlowLayout {

    func numberOfSections(in collectionView: UICollectionView) -> Int {
        return 2
    }

    func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
        // NOTE(review): the fixed 20-item grid looks like placeholder data —
        // confirm the real data source before shipping.
        return section == 0 ? 1 : 20
    }

    func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
        guard indexPath.section == 0 else {
            return collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenAssetCell", for: indexPath) as! TrashContenAssetCell
        }
        let titleCell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenTitleCell", for: indexPath) as! TrashContenTitleCell
        titleCell.trashType = trashType
        return titleCell
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
        if indexPath.section == 0 {
            return CGSize(width: ScreenW, height: 92)
        }
        let side = (ScreenW - 56) / 3
        return CGSize(width: side, height: side)
    }

    func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, insetForSectionAt section: Int) -> UIEdgeInsets {
        if section == 0 {
            return UIEdgeInsets()
        }
        return UIEdgeInsets(top: 0, left: 16, bottom: 0, right: 16)
    }
}
<?xml version="1.0" encoding="UTF-8"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="23504" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" useSafeAreas="YES" colorMatched="YES">
<device id="retina6_12" orientation="portrait" appearance="light"/>
<dependencies>
<deployment identifier="iOS"/>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="23506"/>
<capability name="Safe area layout guides" minToolsVersion="9.0"/>
<capability name="System colors in document resources" minToolsVersion="11.0"/>
<capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/>
</dependencies>
<objects>
<placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/>
<placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/>
<collectionViewCell opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" id="gTV-IL-0wX" customClass="TrashContenViewCell" customModule="PhoneManager" customModuleProvider="target">
<rect key="frame" x="0.0" y="0.0" width="361" height="651"/>
<autoresizingMask key="autoresizingMask"/>
<view key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center">
<rect key="frame" x="0.0" y="0.0" width="361" height="651"/>
<autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
<subviews>
<collectionView clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="scaleToFill" dataMode="none" translatesAutoresizingMaskIntoConstraints="NO" id="Odc-yC-eFI">
<rect key="frame" x="0.0" y="34" width="361" height="543"/>
<color key="backgroundColor" systemColor="systemBackgroundColor"/>
<collectionViewFlowLayout key="collectionViewLayout" minimumLineSpacing="10" minimumInteritemSpacing="10" id="wcT-3C-X99">
<size key="itemSize" width="128" height="128"/>
<size key="headerReferenceSize" width="0.0" height="0.0"/>
<size key="footerReferenceSize" width="0.0" height="0.0"/>
<inset key="sectionInset" minX="0.0" minY="0.0" maxX="0.0" maxY="0.0"/>
</collectionViewFlowLayout>
</collectionView>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="Vwh-eI-xUZ">
<rect key="frame" x="12" y="12" width="18" height="22"/>
<inset key="imageEdgeInsets" minX="0.0" minY="0.0" maxX="2.2250738585072014e-308" maxY="0.0"/>
<state key="normal" image="icon_close_bottom"/>
</button>
<button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="tAl-Ag-gWh">
<rect key="frame" x="55" y="594" width="251" height="40"/>
<color key="backgroundColor" red="0.0" green="0.50980392156862742" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
<constraints>
<constraint firstAttribute="height" constant="40" id="Vfn-jJ-kAL"/>
</constraints>
<fontDescription key="fontDescription" type="system" weight="semibold" pointSize="16"/>
<inset key="imageEdgeInsets" minX="0.0" minY="0.0" maxX="2.2250738585072014e-308" maxY="0.0"/>
<state key="normal" title="Delete"/>
<userDefinedRuntimeAttributes>
<userDefinedRuntimeAttribute type="number" keyPath="Radius">
<real key="value" value="10"/>
</userDefinedRuntimeAttribute>
</userDefinedRuntimeAttributes>
</button>
</subviews>
</view>
<viewLayoutGuide key="safeArea" id="ZTg-uK-7eu"/>
<color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
<constraints>
<constraint firstItem="Odc-yC-eFI" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" constant="34" id="0pH-QH-0Nh"/>
<constraint firstAttribute="trailing" secondItem="Odc-yC-eFI" secondAttribute="trailing" id="6da-yv-Mra"/>
<constraint firstAttribute="bottom" secondItem="Odc-yC-eFI" secondAttribute="bottom" constant="74" id="DXX-tt-ruW"/>
<constraint firstItem="tAl-Ag-gWh" firstAttribute="leading" secondItem="gTV-IL-0wX" secondAttribute="leading" constant="55" id="LPh-dc-mJz"/>
<constraint firstAttribute="trailing" secondItem="tAl-Ag-gWh" secondAttribute="trailing" constant="55" id="Lf5-pG-yLg"/>
<constraint firstItem="Odc-yC-eFI" firstAttribute="leading" secondItem="gTV-IL-0wX" secondAttribute="leading" id="OS7-L9-lZo"/>
<constraint firstItem="Vwh-eI-xUZ" firstAttribute="leading" secondItem="gTV-IL-0wX" secondAttribute="leading" constant="12" id="i0S-Ce-fxf"/>
<constraint firstItem="tAl-Ag-gWh" firstAttribute="top" secondItem="Odc-yC-eFI" secondAttribute="bottom" constant="17" id="sgV-i8-XdP"/>
<constraint firstItem="Vwh-eI-xUZ" firstAttribute="top" secondItem="gTV-IL-0wX" secondAttribute="top" constant="12" id="ulu-1z-3ov"/>
</constraints>
<size key="customSize" width="361" height="651"/>
<connections>
<outlet property="collectionView" destination="Odc-yC-eFI" id="blD-2P-agx"/>
</connections>
<point key="canvasLocation" x="329.00763358778624" y="232.04225352112678"/>
</collectionViewCell>
</objects>
<resources>
<image name="icon_close_bottom" width="18" height="18"/>
<systemColor name="systemBackgroundColor">
<color white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/>
</systemColor>
</resources>
</document>
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
//
// NewGuideThreeCell.swift
// PhoneManager
//
// Created by edy on 2025/4/28.
//
import UIKit
/// Third page of the onboarding guide; the layout lives entirely in its nib,
/// so no extra setup is required here.
class NewGuideThreeCell: UICollectionViewCell {
    override func awakeFromNib() {
        super.awakeFromNib()
    }
}
...@@ -179,7 +179,17 @@ extension UIView { ...@@ -179,7 +179,17 @@ extension UIView {
// 将渐变图层添加到视图的最底层 // 将渐变图层添加到视图的最底层
self.layer.insertSublayer(gradientLayer, at: 0) self.layer.insertSublayer(gradientLayer, at: 0)
} }
/// The x-coordinate of the view's right edge (origin.x + width).
var rightX: CGFloat {
return frame.maxX
}
/// The y-coordinate of the view's bottom edge (origin.y + height).
var bottomY: CGFloat {
return frame.maxY
}
} }
......
//
// EXNotification.swift
// PhoneManager
//
// Created by edy on 2025/5/11.
//
import Foundation
extension NSNotification.Name {
    /// Posted while the trash pager scrolls so the tab indicator can follow.
    static let trashPageScroll = NSNotification.Name("trashPageScroll")
}
# Uncomment the next line to define a global platform for your project # Uncomment the next line to define a global platform for your project
platform :ios, '14.0' platform :ios, '14.0'
source 'https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git' #source 'https://mirrors.tuna.tsinghua.edu.cn/git/CocoaPods/Specs.git'
target 'PhoneManager' do target 'PhoneManager' do
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment