Commit 21848fa1 authored by shenyong

Introduce new photo library asset management classes

parent a3d9d9ea
//
// ActorManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import UIKit
// Actor that manages photo similarity scan state
actor PhotoSimilarStateManager {
private var timeGroups: [TimeGroupModel] = []
private var similarGroups: [SimilarGroupModel] = []
private var pendingSimilarGroups: [SimilarGroupModel] = []
private var processedGroupCount: Int = 0
private var assetsImageCache: [String: UIImage] = [:]
private var hashCache: [String: String] = [:]
func appendTimeGroup(_ group: TimeGroupModel) {
timeGroups.append(group)
}
func appendSimilarGroup(_ group: SimilarGroupModel) {
pendingSimilarGroups.append(group)
processedGroupCount += 1
}
func getAllTimeGroups() -> [TimeGroupModel] {
return timeGroups
}
func getpendingSimilarGroups() -> [SimilarGroupModel] {
return pendingSimilarGroups
}
func getAllSimilarGroups() -> [SimilarGroupModel] {
return similarGroups
}
func getCachedImage(for identifier: String) -> UIImage? {
return assetsImageCache[identifier]
}
func setCachedImage(_ image: UIImage, for identifier: String) {
assetsImageCache[identifier] = image
}
func shouldSavePendingGroups() -> Bool {
return processedGroupCount >= 10
}
func getCachedHash(for identifier: String) async -> String? {
return hashCache[identifier]
}
func setCachedHash(_ hash: String, for identifier: String) async {
hashCache[identifier] = hash
}
func savePendingGroups() {
similarGroups.append(contentsOf: pendingSimilarGroups)
pendingSimilarGroups.removeAll()
processedGroupCount = 0
}
func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
self.timeGroups = timeGroups
self.similarGroups = similarGroups
}
}
// Actor that manages screenshot similarity scan state
actor ScreenshotSimilarStateManager {
private var timeGroups: [TimeGroupModel] = []
private var similarGroups: [SimilarGroupModel] = []
private var pendingSimilarGroups: [SimilarGroupModel] = []
private var processedGroupCount: Int = 0
private var assetsImageCache: [String: UIImage] = [:]
private var hashCache: [String: String] = [:]
func appendTimeGroup(_ group: TimeGroupModel) {
timeGroups.append(group)
}
func appendSimilarGroup(_ group: SimilarGroupModel) {
pendingSimilarGroups.append(group)
processedGroupCount += 1
}
func getAllTimeGroups() -> [TimeGroupModel] {
return timeGroups
}
func getpendingSimilarGroups() -> [SimilarGroupModel] {
return pendingSimilarGroups
}
func getAllSimilarGroups() -> [SimilarGroupModel] {
return similarGroups
}
func getCachedImage(for identifier: String) -> UIImage? {
return assetsImageCache[identifier]
}
func setCachedImage(_ image: UIImage, for identifier: String) {
assetsImageCache[identifier] = image
}
func shouldSavePendingGroups() -> Bool {
return processedGroupCount >= 10
}
func getCachedHash(for identifier: String) async -> String? {
return hashCache[identifier]
}
func setCachedHash(_ hash: String, for identifier: String) async {
hashCache[identifier] = hash
}
func savePendingGroups() {
similarGroups.append(contentsOf: pendingSimilarGroups)
pendingSimilarGroups.removeAll()
processedGroupCount = 0
}
func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
self.timeGroups = timeGroups
self.similarGroups = similarGroups
}
}
// Actor that manages video similarity scan state
actor VideoSimilarStateManager {
private var timeGroups: [TimeGroupModel] = []
private var similarGroups: [SimilarGroupModel] = []
private var pendingSimilarGroups: [SimilarGroupModel] = []
private var processedGroupCount: Int = 0
private var assetsImageCache: [String: UIImage] = [:]
private var hashCache: [String: String] = [:]
func appendTimeGroup(_ group: TimeGroupModel) {
timeGroups.append(group)
}
func appendSimilarGroup(_ group: SimilarGroupModel) {
pendingSimilarGroups.append(group)
processedGroupCount += 1
}
func getAllTimeGroups() -> [TimeGroupModel] {
return timeGroups
}
func getpendingSimilarGroups() -> [SimilarGroupModel] {
return pendingSimilarGroups
}
func getAllSimilarGroups() -> [SimilarGroupModel] {
return similarGroups
}
func getCachedImage(for identifier: String) -> UIImage? {
return assetsImageCache[identifier]
}
func setCachedImage(_ image: UIImage, for identifier: String) {
assetsImageCache[identifier] = image
}
func shouldSavePendingGroups() -> Bool {
return processedGroupCount >= 10
}
func getCachedHash(for identifier: String) async -> String? {
return hashCache[identifier]
}
func setCachedHash(_ hash: String, for identifier: String) async {
hashCache[identifier] = hash
}
func savePendingGroups() {
similarGroups.append(contentsOf: pendingSimilarGroups)
pendingSimilarGroups.removeAll()
processedGroupCount = 0
}
func loadStoredData(timeGroups: [TimeGroupModel], similarGroups: [SimilarGroupModel]) {
self.timeGroups = timeGroups
self.similarGroups = similarGroups
}
}
actor PhotoDuplicateStateManager {
private var duplicateGroups: [DuplicateGroupModel] = []
private var pendingDuplicateGroups: [DuplicateGroupModel] = []
// Caches
private var imageCache: [String: UIImage] = [:]
private var hashCache: [String: String] = [:]
// MARK: - Public methods
func loadStoredData(duplicateGroups: [DuplicateGroupModel]) {
self.duplicateGroups = duplicateGroups
}
func getAllDuplicateGroups() -> [DuplicateGroupModel] {
return duplicateGroups
}
func appendDuplicateGroup(_ group: DuplicateGroupModel) {
pendingDuplicateGroups.append(group)
}
func shouldSavePendingGroups() -> Bool {
return pendingDuplicateGroups.count >= 5
}
func savePendingGroups() {
duplicateGroups.append(contentsOf: pendingDuplicateGroups)
pendingDuplicateGroups.removeAll()
}
func getPendingDuplicateGroups() -> [DuplicateGroupModel] {
return pendingDuplicateGroups
}
// MARK: - Caching
func getCachedImage(for identifier: String) -> UIImage? {
return imageCache[identifier]
}
func setCachedImage(_ image: UIImage, for identifier: String) {
imageCache[identifier] = image
}
func getCachedHash(for identifier: String) -> String? {
return hashCache[identifier]
}
func setCachedHash(_ hash: String, for identifier: String) {
hashCache[identifier] = hash
}
}
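// The state managers above all follow the same batching pattern: newly found groups are
// buffered in a "pending" array and only flushed into the main array once
// shouldSavePendingGroups() returns true (every 10 similar groups, or 5 duplicate groups);
// the owning manager then persists the merged array to disk. A minimal usage sketch,
// assuming an async context (illustrative only):
//
//     let state = PhotoDuplicateStateManager()
//     await state.appendDuplicateGroup(DuplicateGroupModel(groupId: UUID().uuidString, assets: []))
//     if await state.shouldSavePendingGroups() {
//         await state.savePendingGroups() // moves pending groups into the main list
//     }
//     let groups = await state.getAllDuplicateGroups()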
//
// PhotoDuplicateManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/8.
//
import Foundation
import Photos
import UIKit
@MainActor
class PhotoDuplicateManager: @unchecked Sendable {
static let shared = PhotoDuplicateManager()
private let stateManager = PhotoDuplicateStateManager() // uses the new state manager
private init() {}
// MARK: - Configuration
private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
// File paths
private var timeGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("DuplicateTimeGroups.json").path
}
private var duplicateGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("DuplicateGroups.json").path
}
private var currentTask: Task<Void, Error>?
// UserDefaults key for the latest photo timestamp
private let latestPhotoTimeKey = "DuplicateLatestPhotoTimestamp"
func findDuplicateAssets(in assets: [PHAsset],
mediaType: MediaType = .photo,
progressHandler: (([AssetModel]) -> Void)?,
completionHandler: (([[AssetModel]]) -> Void)?) {
Task {
// 1. Load locally stored data
await loadStoredData()
print("Local data loaded")
// 2. Report the already-cached results
let cachedGroups = await stateManager.getAllDuplicateGroups()
print("Reporting cached results", cachedGroups.count)
await MainActor.run {
for group in cachedGroups {
progressHandler?(group.assets)
}
}
// 4. Pre-group by resolution
var resolutionGroups: [[PHAsset]] = []
var tempGroups: [String: [PHAsset]] = [:] // temporary buckets for grouping
// First pass: collect assets that share the same resolution
for asset in assets {
let resolution = "\(asset.pixelWidth)x\(asset.pixelHeight)"
if tempGroups[resolution] == nil {
tempGroups[resolution] = []
}
tempGroups[resolution]?.append(asset)
}
// Second pass: keep only groups with more than one asset
resolutionGroups = tempGroups.values.filter { $0.count > 1 }
// If there are no groups to process, return the cached results directly
if resolutionGroups.isEmpty {
let total = cachedGroups.map { $0.assets }
await MainActor.run {
completionHandler?(total)
}
return
}
let maxConcurrency = 4 // maximum concurrency
let batchSize = max(1, resolutionGroups.count / maxConcurrency)
for batchIndex in stride(from: 0, to: resolutionGroups.count, by: batchSize) {
let endIndex = min(batchIndex + batchSize, resolutionGroups.count)
let batch = Array(resolutionGroups[batchIndex..<endIndex])
await withTaskGroup(of: Void.self) { group in
for assets in batch {
group.addTask { [weak self] in
guard let self = self else { return }
// 5.1 Compute the hash of every image in this group
var hashGroups: [String: [PHAsset]] = [:]
for asset in assets {
if let hash = await self.getOrCalculateHash(for: asset) {
if hashGroups[hash] == nil {
hashGroups[hash] = []
}
hashGroups[hash]?.append(asset)
}
}
// 5.2 Find groups whose members are exact duplicates
let duplicateGroups = hashGroups.values.filter { group in
group.count > 1 && self.areAssetsExactlyDuplicate(group)
}
// 5.3 Handle each duplicate group that was found
for duplicateGroup in duplicateGroups {
let groupId = UUID().uuidString
let assetModels = await self.createAssetModels(from: duplicateGroup)
// Report progress
await MainActor.run {
progressHandler?(assetModels)
}
// Save the duplicate group
await self.stateManager.appendDuplicateGroup(
DuplicateGroupModel(groupId: groupId, assets: assetModels)
)
if await self.stateManager.shouldSavePendingGroups() {
await self.savePendingDuplicateGroups()
}
}
}
}
}
}
// 6. Finish up
if await !stateManager.getPendingDuplicateGroups().isEmpty {
await self.savePendingDuplicateGroups()
}
let allGroups = await stateManager.getAllDuplicateGroups()
await MainActor.run {
print("执行完毕")
completionHandler?(allGroups.map { $0.assets })
}
}
}
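// Usage sketch (illustrative; `assets` stands for any [PHAsset] array, e.g. the photos
// fetched by PhotoManager):
//
//     PhotoDuplicateManager.shared.findDuplicateAssets(in: assets,
//         progressHandler: { group in
//             // invoked on the main actor for each duplicate group as it is found
//         },
//         completionHandler: { allGroups in
//             // invoked once with all duplicate groups, including previously cached ones
//         })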
// MARK: - Helpers
nonisolated private func areAssetsExactlyDuplicate(_ assets: [PHAsset]) -> Bool {
guard let firstAsset = assets.first else { return false }
// Compare every other asset against the first one; comparing the first asset with
// itself would always trip the burst-photo check below, so it is skipped.
return assets.dropFirst().allSatisfy { asset in
// Resolutions must match exactly
if asset.pixelWidth != firstAsset.pixelWidth ||
asset.pixelHeight != firstAsset.pixelHeight {
return false
}
// File sizes must match exactly
let firstSize = getAssetSize(firstAsset)
let currentSize = getAssetSize(asset)
if firstSize != currentSize {
return false
}
// Creation times must not be too close (to avoid burst photos)
if let time1 = asset.creationDate,
let time2 = firstAsset.creationDate {
let timeDiff = abs(time1.timeIntervalSince(time2))
if timeDiff < 1.0 { // burst photos taken within 1 second do not count as duplicates
return false
}
}
return true
}
}
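// Note: getAssetSize(_:) below reads the "fileSize" value from PHAssetResource via KVC.
// That key is widely used but not part of the documented API, and the method falls back
// to 0 when the value cannot be read.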
nonisolated private func getAssetSize(_ asset: PHAsset) -> Int64 {
if let resource = PHAssetResource.assetResources(for: asset).first,
let size = resource.value(forKey: "fileSize") as? Int64 {
return size
}
return 0
}
private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
return await withTaskGroup(of: AssetModel.self) { modelGroup in
var models: [AssetModel] = []
for asset in assets {
modelGroup.addTask {
let size = self.getAssetSize(asset)
return AssetModel(
localIdentifier: asset.localIdentifier,
assetSize: Double(size),
createDate: asset.creationDate ?? Date(),
mediaType: 1
)
}
}
for await model in modelGroup {
models.append(model)
}
return models
}
}
}
// MARK: - Persistence
extension PhotoDuplicateManager {
private func loadStoredData() async {
var loadedDuplicateGroups: [DuplicateGroupModel] = []
if let data = try? Data(contentsOf: URL(fileURLWithPath: duplicateGroupsPath)),
let groups = try? JSONDecoder().decode([DuplicateGroupModel].self, from: data) {
loadedDuplicateGroups = groups
}
await stateManager.loadStoredData(duplicateGroups: loadedDuplicateGroups)
}
private func savePendingDuplicateGroups() async {
await stateManager.savePendingGroups()
if let data = try? JSONEncoder().encode(await stateManager.getAllDuplicateGroups()) {
try? data.write(to: URL(fileURLWithPath: duplicateGroupsPath))
}
}
}
// MARK: - Hash calculation
extension PhotoDuplicateManager {
private func getOrCalculateHash(for asset: PHAsset) async -> String? {
if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
return cachedHash
}
if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
let hash = calculateImageHash(cachedImage)
await stateManager.setCachedHash(hash, for: asset.localIdentifier)
return hash
}
let options = PHImageRequestOptions()
options.version = .original
let targetSize = CGSize(width: 32, height: 32)
return await withCheckedContinuation { continuation in
PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
if let image = image, let self = self {
let hash = self.calculateImageHash(image)
Task {
await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
await self.stateManager.setCachedHash(hash, for: asset.localIdentifier)
}
continuation.resume(returning: hash)
} else {
continuation.resume(returning: nil)
}
}
}
}
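// calculateImageHash(_:) below computes a simple average hash (aHash): the requested
// 32x32 thumbnail is run through a monochrome filter, each pixel's luminance is compared
// against the mean, and the result is emitted as a 1024-character "0"/"1" string. Note
// that the pixel loop assumes a tightly packed 32x32 RGBA bitmap; a thumbnail with a
// different size or row stride would be read incorrectly.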
private func calculateImageHash(_ image: UIImage) -> String {
guard let cgImage = image.cgImage else { return "" }
let ciImage = CIImage(cgImage: cgImage)
// Set the input image before reading outputImage; reading it first always yields nil
guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
filter.setValue(ciImage, forKey: kCIInputImageKey)
guard let outputImage = filter.outputImage else { return "" }
let context = CIContext()
guard let scaledImage = context.createCGImage(outputImage, from: outputImage.extent),
let pixelData = UIImage(cgImage: scaledImage).cgImage?.dataProvider?.data,
let data = CFDataGetBytePtr(pixelData) else {
return ""
}
var pixels = Array(repeating: UInt8(0), count: 1024)
for i in 0..<32 {
for j in 0..<32 {
let pixelIndex = (i * 32 + j) * 4
let gray = UInt8(
0.299 * Double(data[pixelIndex]) +
0.587 * Double(data[pixelIndex + 1]) +
0.114 * Double(data[pixelIndex + 2])
)
pixels[i * 32 + j] = gray
}
}
let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
return pixels.map { $0 > average ? "1" : "0" }.joined()
}
}
//
// PhotoManager.swift
// PhotoManager
//
// Created by edy on 2025/4/22.
//
import Foundation
import Photos
import AVFoundation
import CoreML
import Vision
import UIKit
import Combine // required for ObservableObject and @Published
class PhotoManager:ObservableObject{
static let shared = PhotoManager()
private init() {
requestAuthorization()
}
// Published property
@Published private(set) var baseDataLoadingState: BaseDataLoadingState = .notLoaded
// Loading-state enum
enum BaseDataLoadingState {
case notLoaded
case loading
case loaded
case failed(Error)
}
// MARK: - Basic configuration
// All media assets
var allAssets:[PHAsset] = []
// Photos
@Published private(set) var photosAssets:[PHAsset] = []
// Screenshots
@Published private(set) var screenShotAssets:[PHAsset] = []
// Videos
@Published private(set) var videoAssets:[PHAsset] = []
// Other
@Published private(set) var otherAssets:[PHAsset] = []
// Similar photo groups
var similarModels:[[AssetModel]] = []
// Similar screenshot groups
var similarScreenShotModels:[[AssetModel]] = []
// Similar video groups
var similarVideoModels:[[AssetModel]] = []
// Duplicate photo groups
var duplicateModels:[[AssetModel]] = []
// Screenshots
var screenShotModels:[AssetModel] = []
// Videos
var videoModels:[AssetModel] = []
// Other
var otherModels:[AssetModel] = []
@Published private(set) var screenShotTotalSize:Int64 = 0
@Published private(set) var videoTotalSize:Int64 = 0
@Published private(set) var otherTotalSize:Int64 = 0
private var currentPage: Int = 0
private let pageSize: Int = 50 // number of assets loaded per page
// MARK: - Basic functions
// Request photo library authorization
func requestAuthorization(completion: @escaping (Bool) -> Void) {
PHPhotoLibrary.requestAuthorization { status in
DispatchQueue.main.async {
completion(status == .authorized)
}
}
}
private func requestAuthorization(){
// Fetch the base data once authorized
requestAuthorization {[weak self] _ in
guard let weakSelf = self else { return }
weakSelf.getBaseAssetGroup()
}
}
// Fetch the base asset groups
func getBaseAssetGroup() {
baseDataLoadingState = .loading
DispatchQueue.global(qos: .background).async {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let photoAllAssets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
let videoAllAssets = PHAsset.fetchAssets(with: .video, options: fetchOptions)
let fetchOptionsS = PHFetchOptions()
fetchOptionsS.predicate = NSPredicate(format: "mediaSubtypes == %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
let screenShotAllAssets = PHAsset.fetchAssets(with: .image, options: fetchOptionsS)
let photoAssetsArray = photoAllAssets.objects(at: IndexSet(0..<photoAllAssets.count))
let videoAssetsArray = videoAllAssets.objects(at: IndexSet(0..<videoAllAssets.count))
let screenShotArray = screenShotAllAssets.objects(at: IndexSet(0..<screenShotAllAssets.count))
let otherArray = photoAssetsArray.filter {!screenShotArray.contains($0) }
print("基本数据执行完毕")
// 在主线程更新状态
DispatchQueue.main.async {
self.photosAssets = photoAssetsArray
self.videoAssets = videoAssetsArray
self.screenShotAssets = screenShotArray
self.otherAssets = otherArray
self.baseDataLoadingState = .loaded
}
}
}
// Convert video assets to models and compute the total size
func convertVideoModels(complectionHandler:(([AssetModel],Int64) ->Void)?){
Task{
let start = CFAbsoluteTimeGetCurrent()
self.videoModels = await convertAssetsToModel(for: self.videoAssets, mediaType: 2)
let duration = CFAbsoluteTimeGetCurrent() - start
print("其他图片转换总耗时: \(duration)秒")
let videoTotalSize = Int64(self.videoModels.reduce(0){$0+$1.assetSize})
await MainActor.run {
self.videoTotalSize = videoTotalSize
complectionHandler?(self.videoModels,videoTotalSize)
}
}
}
// Convert other (non-screenshot) photos to models and compute the total size
func convertOtherPhotoModels(complectionHandler:(([AssetModel],Int64) ->Void)?){
Task{
let start = CFAbsoluteTimeGetCurrent()
self.otherModels = await convertAssetsToModel(for: self.otherAssets, mediaType: 1)
let duration = CFAbsoluteTimeGetCurrent() - start
print("其他图片转换总耗时: \(duration)秒")
let otherTotalSize = Int64(self.otherModels.reduce(0){$0+$1.assetSize})
await MainActor.run {
self.otherTotalSize = otherTotalSize
complectionHandler?(self.otherModels,otherTotalSize)
}
}
}
// Convert screenshot assets to models and compute the total size
func convertScreenShotModels(complectionHandler:(([AssetModel],Int64) ->Void)?){
Task{
let start = CFAbsoluteTimeGetCurrent()
self.screenShotModels = await convertAssetsToModel(for: self.screenShotAssets, mediaType: 1)
let duration = CFAbsoluteTimeGetCurrent() - start
print("截图转换总耗时: \(duration)秒")
let screenShotTotalSize = Int64(self.screenShotModels.reduce(0){$0+$1.assetSize})
await MainActor.run {
self.screenShotTotalSize = screenShotTotalSize
complectionHandler?(self.screenShotModels,screenShotTotalSize)
}
}
}
}
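// Usage sketch (illustrative): once getBaseAssetGroup() has populated the asset arrays,
// the conversion helpers can be called independently, e.g.
//
//     PhotoManager.shared.convertVideoModels { models, totalSize in
//         print(PhotoManager.shared.formatBytes(totalSize))
//     }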
// MARK: - Utility helpers
extension PhotoManager{
// Get the file size of an asset
func getAssetSize(for asset:PHAsset) ->Int64{
if let resource = PHAssetResource.assetResources(for: asset).first,
let size = resource.value(forKey: "fileSize") as? Int64 {
return size
} else {
return 0
}
}
// Request the size of a photo's data
private func requestImageData(for asset: PHAsset, completion: @escaping (Int64) -> Void) {
let options = PHImageRequestOptions()
options.isSynchronous = true // synchronous, so the size is available before the callback returns
options.version = .original
PHImageManager.default().requestImageDataAndOrientation(for: asset, options: options) { (data, _, _, _) in
let dataSize = data?.count ?? 0
completion(Int64(dataSize))
}
}
// Request the size of a video's data
private func getVideoAssetSize(_ asset: PHAsset, completion: @escaping (Int64) -> Void) {
let options = PHVideoRequestOptions()
// Request the original version
options.version = .original
// Allow network access so iCloud assets can be resolved
options.isNetworkAccessAllowed = true
PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { (avAsset, _, _) in
if let urlAsset = avAsset as? AVURLAsset {
let size = try? urlAsset.url.resourceValues(forKeys: [.fileSizeKey]).fileSize ?? 0
completion(Int64(size ?? 0))
} else {
completion(0)
}
}
}
// Fetch an image by localIdentifier
func getImage(localIdentifier: String, targetSize: CGSize = PHImageManagerMaximumSize, completion: @escaping (UIImage?) -> Void) {
// Fetch the PHAsset for this localIdentifier
let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
guard let asset = fetchResult.firstObject else {
completion(nil)
return
}
// Configure the image request options
let options = PHImageRequestOptions()
options.version = .current
options.deliveryMode = .highQualityFormat
options.isNetworkAccessAllowed = true // allow downloading from iCloud
options.resizeMode = .exact
// Request the image
PHImageManager.default().requestImage(
for: asset,
targetSize: targetSize,
contentMode: .aspectFit,
options: options
) { image, info in
DispatchQueue.main.async {
completion(image)
}
}
}
// Get the URL of a video asset
func getVideoURL(localIdentifier: String, completion: @escaping (URL?) -> Void) {
// 1. Fetch the PHAsset by its local identifier
let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
guard let asset = fetchResult.firstObject else {
completion(nil)
return
}
// 2. Make sure the asset is a video
guard asset.mediaType == .video else {
completion(nil)
return
}
// 3. Configure the video request options
let options = PHVideoRequestOptions()
options.version = .original
options.deliveryMode = .highQualityFormat
options.isNetworkAccessAllowed = true
// 4. Request the video resource
PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { (avAsset, _, _) in
DispatchQueue.main.async {
if let urlAsset = avAsset as? AVURLAsset {
completion(urlAsset.url)
} else {
completion(nil)
}
}
}
}
// Format a byte count for display
func formatBytes(_ bytes: Int64) -> String {
let formatter = ByteCountFormatter()
formatter.allowedUnits = [.useKB, .useMB, .useGB] // pick an appropriate unit
formatter.countStyle = .file
return formatter.string(fromByteCount: bytes)
}
}
// MARK: - Data fetching
extension PhotoManager{
// Fetch images up to the given page
func fetchImages(page: Int, completion: @escaping ([PHAsset]) -> Void) {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
fetchOptions.fetchLimit = pageSize * (page + 1) // grow the fetch limit with the page
let allImages = PHAsset.fetchAssets(with: .image, options: fetchOptions)
var images = [PHAsset]()
allImages.enumerateObjects { (asset, _, _) in
images.append(asset)
}
completion(images)
}
// Fetch videos up to the given page
func fetchVideos(page: Int, completion: @escaping ([PHAsset]) -> Void) {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
fetchOptions.fetchLimit = pageSize * (page + 1) // grow the fetch limit with the page
let allVideos = PHAsset.fetchAssets(with: .video, options: fetchOptions)
var videos = [PHAsset]()
allVideos.enumerateObjects { (asset, _, _) in
videos.append(asset)
}
completion(videos)
}
// Fetch media assets of the given type
func fetchMediaAssets(type: MediaType, completion: @escaping ([PHAsset]) -> Void) {
DispatchQueue.global(qos: .background).async {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
var assets: PHFetchResult<PHAsset>
switch type {
case .video:
assets = PHAsset.fetchAssets(with: .video, options: fetchOptions)
case .screenshot:
// Identify screenshots via the system media subtype
fetchOptions.predicate = NSPredicate(format: "mediaSubtype == %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
case .photo:
assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
case .other:
// Exclude videos and screenshots
let predicates = [
NSPredicate(format: "mediaType == %d", PHAssetMediaType.image.rawValue),
NSPredicate(format: "mediaSubtype != %d", PHAssetMediaSubtype.photoScreenshot.rawValue)
]
fetchOptions.predicate = NSCompoundPredicate(andPredicateWithSubpredicates: predicates)
assets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
}
var mediaAssets:[PHAsset] = []
assets.enumerateObjects {(asset, _, _) in
mediaAssets.append(asset)
}
DispatchQueue.main.async {
completion(mediaAssets)
}
}
}
// Fetch all image assets
func fetchTotalAssets(completion: @escaping ([PHAsset]) -> Void) {
DispatchQueue.global(qos: .background).async {
let fetchOptions = PHFetchOptions()
fetchOptions.sortDescriptors = [NSSortDescriptor(key: "creationDate", ascending: false)]
let allAssets = PHAsset.fetchAssets(with: .image, options: fetchOptions)
var assets = [PHAsset]()
allAssets.enumerateObjects { (asset, _, _) in
assets.append(asset)
}
DispatchQueue.main.async {
self.allAssets = assets
completion(assets)
}
}
}
}
extension PhotoManager{
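// convertAssetsToModel(for:mediaType:) below processes assets in batches of 4, running
// each batch in its own TaskGroup so at most 4 size lookups are in flight at a time;
// results within a batch are collected in completion order.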
func convertAssetsToModel(for assets: [PHAsset], mediaType: Int) async -> [AssetModel] {
let batchSize = 4 // limit concurrency
var results: [AssetModel] = []
// Process assets in batches
for batch in stride(from: 0, to: assets.count, by: batchSize) {
let end = min(batch + batchSize, assets.count)
let currentBatch = Array(assets[batch..<end])
await withTaskGroup(of: AssetModel.self) { group in
for asset in currentBatch {
group.addTask {
return AssetModel(
localIdentifier: asset.localIdentifier,
assetSize: Double(self.getAssetSize(for: asset)),
createDate: asset.creationDate ?? Date(),
mediaType: mediaType
)
}
}
// Collect the results of the current batch
for await result in group {
results.append(result)
}
}
}
return results
}
func getModelsData(){
Task{
let start = CFAbsoluteTimeGetCurrent()
self.screenShotModels = await convertAssetsToModel(for: self.screenShotAssets, mediaType: 1)
self.otherModels = await convertAssetsToModel(for: self.otherAssets, mediaType: 1)
self.videoModels = await convertAssetsToModel(for: self.videoAssets, mediaType: 2)
let duration = CFAbsoluteTimeGetCurrent() - start
print("转换总耗时: \(duration)秒")
let screenShotTotalSize = Int64(self.screenShotModels.reduce(0){$0+$1.assetSize})
let videoTotalSize = Int64(self.videoModels.reduce(0){$0+$1.assetSize})
let otherTotalSize = Int64(self.otherModels.reduce(0){$0+$1.assetSize})
await MainActor.run {
self.screenShotTotalSize = screenShotTotalSize
self.videoTotalSize = videoTotalSize
self.otherTotalSize = otherTotalSize
}
}
}
}
//
// AssetModel.swift
// CleanPhoto
//
// Created by edy on 2025/5/7.
//
import Foundation
struct AssetModel :Codable,Hashable {
var localIdentifier : String
var assetSize : Double
var createDate : Date
var mediaType:Int // 1 = image, 2 = video
init(localIdentifier: String, assetSize: Double, createDate: Date,mediaType:Int = 1) {
self.localIdentifier = localIdentifier
self.assetSize = assetSize
self.createDate = createDate
self.mediaType = mediaType
}
func hash(into hasher: inout Hasher) {
hasher.combine(localIdentifier)
hasher.combine(assetSize)
hasher.combine(createDate)
hasher.combine(mediaType)
}
static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool {
return lhs.localIdentifier == rhs.localIdentifier &&
lhs.assetSize == rhs.assetSize &&
lhs.createDate == rhs.createDate && lhs.mediaType == rhs.mediaType
}
}
struct AssetFileModel:Codable{
var videoAssets:[AssetModel] = []
var otherAssets:[AssetModel] = []
var screenShotAssets:[AssetModel] = []
var photosAssets:[AssetModel] = []
}
// Media type enum
enum MediaType {
case video
case screenshot
case photo
case other
}
extension MediaType{
var dbValue:String{
switch self {
case .video:
return "video"
case .screenshot:
return "screenshot"
case .photo:
return "photo"
case .other:
return "other"
}
}
}
// Time-group model
struct TimeGroupModel: Codable {
let groupId: String
let startTime: TimeInterval
let endTime: TimeInterval
var isProcessed: Bool
}
// Similar-photo group model
struct SimilarGroupModel: Codable {
let groupId: String
var assets: [AssetModel]
}
// Duplicate-photo group model
struct DuplicateGroupModel: Codable {
let groupId: String
let assets: [AssetModel]
}
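// With the default JSONEncoder settings used by the managers, a persisted
// SimilarGroupModel looks roughly like the following (dates are encoded as seconds since
// the 2001 reference date). This is an illustrative example, not actual data:
//
//     {
//       "groupId": "8E2C3C1A-...",
//       "assets": [
//         { "localIdentifier": "ABC123/L0/001", "assetSize": 2048576, "createDate": 768123456.0, "mediaType": 1 }
//       ]
//     }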
//
// PhotoSimilarManager.swift
// PhotoSimilar
//
// Created by edy on 2025/5/8.
//
import Foundation
import Photos
import UIKit
@MainActor
class PhotoSimilarManager: @unchecked Sendable {
static let shared = PhotoSimilarManager()
private let stateManager = PhotoSimilarStateManager()
private init() {}
// MARK: - Configuration
private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
private let fileSizeThreshold: Double = 0.05 // file-size difference threshold (5%)
private let resolutionThreshold: Double = 0.05 // resolution difference threshold (5%)
// private let hashDistanceThreshold: Int = 10 // pHash Hamming-distance threshold
// File paths
private var timeGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("TimeGroups.json").path
}
private var similarGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("SimilarGroups.json").path
}
private var currentTask: Task<Void, Error>?
// UserDefaults key for the latest photo timestamp
private let latestPhotoTimeKey = "LatestPhotoTimestamp"
func findSimilarAssets(in assets: [PHAsset],
mediaType: MediaType = .photo,
progressHandler: (([AssetModel]) -> Void)?,
completionHandler: (([[AssetModel]]) -> Void)?) {
Task {
// 1. Load locally stored data
await loadStoredData()
print("Local data loaded")
// 2. Read the latest asset timestamp recorded on the previous run
var lastLatestTime = UserDefaults.standard.double(forKey: latestPhotoTimeKey)
if lastLatestTime == 0{
// Nothing stored yet, so this is the first run; use the newest asset's time
lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
}
// 3. Report the already-cached results
let cachedGroups = await stateManager.getAllSimilarGroups()
print("Reporting cached results", cachedGroups.count)
await MainActor.run {
for group in cachedGroups {
progressHandler?(group.assets)
}
}
// 4. Time-window grouping
// Assets newer than the last recorded timestamp
let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
// Assets at or before the last recorded timestamp
let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
// Record the newest asset's timestamp
if let latestAsset = assets.first {
let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
UserDefaults.standard.set(latestTime, forKey: latestPhotoTimeKey)
UserDefaults.standard.synchronize()
}
let newtimeGroup = groupAssetsByTimeWindow(newAssets)
let oldGroups = groupAssetsByTimeWindow(oldAssets)
let timeGroups = newtimeGroup + oldGroups
var unprocessedGroups: [[PHAsset]] = []
// Time groups that have already been processed
let processedTimeGroups = await stateManager.getAllTimeGroups()
// 5. Process the new photos, skipping time groups that were already handled
for group in timeGroups {
if let firstAsset = group.first,
let lastAsset = group.last,
let firstDate = firstAsset.creationDate,
let lastDate = lastAsset.creationDate {
// Check whether this time group was already processed
let isProcessed = processedTimeGroups.contains { timeGroup in
return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
timeGroup.isProcessed
}
if !isProcessed {
unprocessedGroups.append(group)
}
}
}
print("开始处理分组,分组资源为:",unprocessedGroups.count)
let maxConcurrency = 6 // 最大并发数
let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
if unprocessedGroups.count == 0{
let total = cachedGroups.compactMap{$0.assets}
completionHandler?(total)
return
}
for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
await withTaskGroup(of: Void.self) { group in
for unGroup in batch {
group.addTask { [weak self] in
// Task body
guard let self = self else {
print("self is nil, exiting the task early")
return
}
// 6.1 Pre-group by file size
let sizeGroups = self.groupAssetsBySize(unGroup)
// 6.2 Process each size group
for sizeGroup in sizeGroups {
let similarGroups = await self.findSimilarInGroupUsingKMeans(sizeGroup)
if !similarGroups.isEmpty {
for similarGroup in similarGroups {
let groupId = UUID().uuidString
// 6.3 Build asset models
let assetModels = await createAssetModels(from: similarGroup)
// 6.4 Report progress
await MainActor.run {
progressHandler?(assetModels)
}
// 6.5 Save the similar group
await stateManager.appendSimilarGroup(SimilarGroupModel(groupId: groupId, assets: assetModels))
if await stateManager.shouldSavePendingGroups() {
await savePendingSimilarGroups()
}
}
}
}
// 6.6 Mark this time group as processed
if let firstDate = unGroup.first?.creationDate,
let lastDate = unGroup.last?.creationDate {
let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
let timeGroup = TimeGroupModel(
groupId: groupId,
startTime: firstDate.timeIntervalSince1970,
endTime: lastDate.timeIntervalSince1970,
isProcessed: true
)
await self.saveTimeGroup(timeGroup)
}
}
}
}
}
// 7. Finish up
if await !stateManager.getpendingSimilarGroups().isEmpty {
await savePendingSimilarGroups()
}
let allGroups = await stateManager.getAllSimilarGroups()
await MainActor.run {
print("执行完毕")
completionHandler?(allGroups.map { $0.assets })
}
}
}
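// Usage sketch (illustrative; `photos` stands for any [PHAsset] array, e.g.
// PhotoManager.shared.photosAssets):
//
//     PhotoSimilarManager.shared.findSimilarAssets(in: photos,
//         progressHandler: { group in
//             // invoked on the main actor for each similar group as it is found
//         },
//         completionHandler: { allGroups in
//             // invoked once with all similar groups, including previously cached ones
//         })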
private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
return await withTaskGroup(of: AssetModel.self) { modelGroup in
var models: [AssetModel] = []
for asset in assets {
modelGroup.addTask {
return await withCheckedContinuation { continuation in
let assetSize: Double
if let resource = PHAssetResource.assetResources(for: asset).first,
let size = resource.value(forKey: "fileSize") as? Int64 {
assetSize = Double(size)
} else {
assetSize = 0
}
let model = AssetModel(
localIdentifier: asset.localIdentifier,
assetSize: assetSize,
createDate: asset.creationDate ?? Date(),
mediaType: 1
)
continuation.resume(returning: model)
}
}
}
for await model in modelGroup {
models.append(model)
}
return models
}
}
}
// MARK: - Grouping helpers
extension PhotoSimilarManager{
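// groupAssetsByTimeWindow(_:) below sorts assets newest-first and walks them, starting a
// new group whenever the gap to the current group's start time exceeds
// timeWindowInSeconds (10 minutes); groups containing a single asset are discarded.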
nonisolated private func groupAssetsByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
// Sort by time, descending (newest first)
let sortedAssets = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
var timeGroups: [[PHAsset]] = []
var currentGroup: [PHAsset] = []
if let firstAsset = sortedAssets.first {
var groupStartTime = firstAsset.creationDate ?? Date()
for asset in sortedAssets {
let currentTime = asset.creationDate ?? Date()
// Time difference (descending order, so subtract currentTime from groupStartTime)
let timeDiff = groupStartTime.timeIntervalSince(currentTime)
// If the gap exceeds the window, start a new group
if timeDiff > timeWindowInSeconds {
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
// Start a new group, using this asset's time as the new start time
currentGroup = []
groupStartTime = currentTime
}
currentGroup.append(asset)
}
// Handle the final group
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
}
return timeGroups
}
nonisolated private func groupAssetsBySize(_ assets: [PHAsset]) -> [[PHAsset]] {
var sizeGroups: [[PHAsset]] = []
var processedAssets = Set<String>()
for asset in assets {
if processedAssets.contains(asset.localIdentifier) {
continue
}
var currentGroup = [asset]
processedAssets.insert(asset.localIdentifier)
// Find assets of similar size
for compareAsset in assets {
if processedAssets.contains(compareAsset.localIdentifier) {
continue
}
if isFileSizeSimilar(asset, compareAsset) {
currentGroup.append(compareAsset)
processedAssets.insert(compareAsset.localIdentifier)
}
}
if currentGroup.count > 1 {
sizeGroups.append(currentGroup)
}
}
return sizeGroups
}
// Size comparison (note: this compares pixel area, not byte size, despite the name)
nonisolated private func isFileSizeSimilar(_ asset1: PHAsset, _ asset2: PHAsset) -> Bool {
let size1 = Double(asset1.pixelWidth * asset1.pixelHeight)
let size2 = Double(asset2.pixelWidth * asset2.pixelHeight)
let ratio = abs(size1 - size2) / max(size1, size2)
return ratio <= fileSizeThreshold
}
}
// MARK: - Persistence
extension PhotoSimilarManager{
private func loadStoredData() async {
var loadedTimeGroups: [TimeGroupModel] = []
var loadedSimilarGroups: [SimilarGroupModel] = []
// Load time-group data
if let data = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
let groups = try? JSONDecoder().decode([TimeGroupModel].self, from: data) {
loadedTimeGroups = groups
}
// Load similar-group data
if let data = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
let groups = try? JSONDecoder().decode([SimilarGroupModel].self, from: data) {
loadedSimilarGroups = groups
}
await stateManager.loadStoredData(timeGroups: loadedTimeGroups, similarGroups: loadedSimilarGroups)
}
private func saveTimeGroup(_ group: TimeGroupModel) async {
await stateManager.appendTimeGroup(group)
// Persist to disk
if let data = try? JSONEncoder().encode(await stateManager.getAllTimeGroups()) {
try? data.write(to: URL(fileURLWithPath: timeGroupsPath))
}
}
private func savePendingSimilarGroups() async {
await stateManager.savePendingGroups()
// Persist to disk
if let data = try? JSONEncoder().encode(await stateManager.getAllSimilarGroups()) {
try? data.write(to: URL(fileURLWithPath: similarGroupsPath))
}
}
private func loadSimilarGroups() async -> [SimilarGroupModel] {
let groups = await stateManager.getAllSimilarGroups()
// Drop assets that no longer exist in the library
return groups.map { group in
var validAssets = group.assets
validAssets.removeAll { asset in
let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [asset.localIdentifier], options: nil)
return fetchResult.firstObject == nil
}
return SimilarGroupModel(groupId: group.groupId, assets: validAssets)
}.filter { !$0.assets.isEmpty }
}
}
// MARK: - pHash computation
extension PhotoSimilarManager{
// Get or compute an image hash
private func getOrCalculateHash(for asset: PHAsset) async -> String? {
if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
print("返回缓存cachedHash")
return cachedHash
}
if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
let hash = calculateImageHash(cachedImage)
print("返回缓存hash")
await stateManager.setCachedHash(hash, for: asset.localIdentifier)
return hash
}
let options = PHImageRequestOptions()
options.version = .original
let targetSize = CGSize(width: 32, height: 32)
// print("开始获取图片hash")
return await withCheckedContinuation { continuation in
PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
if let image = image, let self = self {
let tempHash = self.calculateImageHash(image)
Task {
await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
await self.stateManager.setCachedHash(tempHash, for: asset.localIdentifier)
}
continuation.resume(returning: tempHash)
} else {
continuation.resume(returning: nil)
}
}
}
}
// Compute the image hash (average hash)
private func calculateImageHash(_ image: UIImage) -> String {
guard let cgImage = image.cgImage else { return "" }
let ciImage = CIImage(cgImage: cgImage)
// Set the input image before reading outputImage; reading it first always yields nil
guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
filter.setValue(ciImage, forKey: kCIInputImageKey)
guard let outputImage = filter.outputImage else { return "" }
let context = CIContext()
guard let scaledImage = context.createCGImage(outputImage, from: outputImage.extent),
let pixelData = UIImage(cgImage: scaledImage).cgImage?.dataProvider?.data,
let data = CFDataGetBytePtr(pixelData) else {
return ""
}
var pixels = Array(repeating: UInt8(0), count: 1024)
for i in 0..<32 {
for j in 0..<32 {
let pixelIndex = (i * 32 + j) * 4
let gray = UInt8(
0.299 * Double(data[pixelIndex]) +
0.587 * Double(data[pixelIndex + 1]) +
0.114 * Double(data[pixelIndex + 2])
)
pixels[i * 32 + j] = gray
}
}
let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
return pixels.map { $0 > average ? "1" : "0" }.joined()
}
// Hamming distance between two hash strings
private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
guard hash1.count == hash2.count else { return Int.max }
return zip(hash1, hash2).filter { $0 != $1 }.count
}
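// Example: calculateHammingDistance("1010", "1001") == 2, since the last two positions differ.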
func cancelCurrentOperation() {
currentTask?.cancel()
}
}
// MARK: - Similarity clustering
extension PhotoSimilarManager{
// K-Means clustering
func kMeansClustering(data: [[Double]], k: Int, maxIterations: Int = 100) -> [Int] {
guard data.count > 0 && k > 0 && k <= data.count else {
return []
}
var centroids = (0..<k).map { _ in data.randomElement()! }
var labels = Array(repeating: 0, count: data.count)
for _ in 0..<maxIterations {
var newCentroids = Array(repeating: Array(repeating: 0.0, count: data[0].count), count: k)
var clusterCounts = Array(repeating: 0, count: k)
// Assign each point to the nearest centroid
for (i, point) in data.enumerated() {
var minDistance = Double.infinity
var closestCentroidIndex = 0
for (j, centroid) in centroids.enumerated() {
let distance = euclideanDistance(point, centroid)
if distance < minDistance && distance < 0.3 {
minDistance = distance
closestCentroidIndex = j
}
}
labels[i] = closestCentroidIndex
newCentroids[closestCentroidIndex] = newCentroids[closestCentroidIndex].enumerated().map { index, value in
value + point[index]
}
clusterCounts[closestCentroidIndex] += 1
}
// Update the centroids
var hasChanged = false
for i in 0..<k {
if clusterCounts[i] > 0 {
let newCentroid = newCentroids[i].enumerated().map { index, value in
value / Double(clusterCounts[i])
}
if newCentroid != centroids[i] {
hasChanged = true
centroids[i] = newCentroid
}
}
}
// Stop early if no centroid changed
if !hasChanged {
break
}
}
return labels
}
// Euclidean distance between two points
func euclideanDistance(_ point1: [Double], _ point2: [Double]) -> Double {
let squaredSum = zip(point1, point2).map { pow($0 - $1, 2) }.reduce(0, +)
return sqrt(squaredSum)
}
// Convert a hash string into a numeric vector
func hashToVector(_ hash: String) -> [Double] {
return hash.map { $0 == "1" ? 1.0 : 0.0 }
}
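// findSimilarInGroupUsingKMeans(_:) below hashes every asset, turns each hash into a
// 1024-dimensional 0/1 vector, and clusters the vectors with kMeansClustering. Note that
// the centroid assignment above only accepts a centroid whose distance is below 0.3;
// points with no centroid within that radius fall back to cluster 0.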
private func findSimilarInGroupUsingKMeans(_ assets: [PHAsset]) async -> [[PHAsset]] {
// Compute the hash for every asset, keeping its index: TaskGroup results arrive in
// completion order, so the index is needed to pair each hash with the right asset
let indexedHashes = await withTaskGroup(of: (Int, String?).self) { group in
for (index, asset) in assets.enumerated() {
group.addTask {
return (index, await self.getOrCalculateHash(for: asset))
}
}
var result: [(Int, String?)] = []
for await pair in group {
result.append(pair)
}
return result
}
// Keep only assets whose hash could be computed and convert the hashes to vectors
let valid: [(PHAsset, [Double])] = indexedHashes.compactMap { pair in
guard let hash = pair.1 else { return nil }
return (assets[pair.0], hashToVector(hash))
}
guard !valid.isEmpty else { return [] }
let vectors = valid.map { $0.1 }
// Run K-Means clustering
let k = min(valid.count, 10) // assume at most 10 clusters
let labels = kMeansClustering(data: vectors, k: k)
// Group assets by cluster label
var clusters: [[PHAsset]] = Array(repeating: [], count: k)
for (i, label) in labels.enumerated() {
clusters[label].append(valid[i].0)
}
// Drop clusters that contain a single element
return clusters.filter { $0.count > 1 }
}
}
// MARK: - Deprecated methods
extension PhotoSimilarManager{
// private func findSimilarInGroup(_ assets: [PHAsset]) async -> [[PHAsset]] {
// let startTime = Date()
// print("开始执行 findSimilarInGroup,资产数量:", assets.count)
// guard !Task.isCancelled else {
// print("异步任务已取消")
// return []
// } // 添加取消检查
// var similarGroups: [[PHAsset]] = []
// let count = assets.count
// var isProcessed = Array(repeating: false, count: count)
//
// for i in 0..<count {
// if isProcessed[i] {
// continue
// }
//
// var currentGroup = [assets[i]]
// isProcessed[i] = true
//
// for j in (i + 1)..<count {
// if isProcessed[j] {
// continue
// }
//
// if await areAssetsSimilar(assets[i], assets[j]) {
// currentGroup.append(assets[j])
// isProcessed[j] = true
// }
// }
//
// if currentGroup.count > 1 {
// similarGroups.append(currentGroup)
// }
// }
// let endTime = Date()
// let elapsedTime = endTime.timeIntervalSince(startTime)
// print("拿到相似组内数据",similarGroups.count, ",耗时:", elapsedTime, "秒")
// return similarGroups
// }
// Similarity comparison
// private func areAssetsSimilar(_ asset1: PHAsset, _ asset2: PHAsset) async -> Bool {
// // 1. 检查分辨率
// if !isResolutionSimilar(asset1, asset2) {
// return false
// }
// return await compareAssetHashes(asset1, asset2)
// }
//
// // 分辨率比较
// private func isResolutionSimilar(_ asset1: PHAsset, _ asset2: PHAsset) -> Bool {
// let res1 = Double(asset1.pixelWidth * asset1.pixelHeight)
// let res2 = Double(asset2.pixelWidth * asset2.pixelHeight)
// let ratio = abs(res1 - res2) / max(res1, res2)
// return ratio <= resolutionThreshold
// }
//
// // 比较图片哈希值
// private func compareAssetHashes(_ asset1: PHAsset, _ asset2: PHAsset) async -> Bool {
// print("获取哈希值开始")
// // 获取或计算 hash
// async let hash1 = getOrCalculateHash(for: asset1)
// async let hash2 = getOrCalculateHash(for: asset2)
//
// guard let h1 = await hash1, let h2 = await hash2 else { return false }
// print("拿到的hash1\(h1),拿到的hash2\(h2)")
// let distance = calculateHammingDistance(h1, h2)
// return distance < hashDistanceThreshold
// }
// Get pHash
// private func getOrCalculateHash(for asset: PHAsset) async -> String? {
// // 从缓存获取图片
// if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
// return calculateImageHash(cachedImage)
// }
//
// let options = PHImageRequestOptions()
// options.version = .original
// let targetSize = CGSize(width: 32, height: 32)
//
// // 使用 async/await 替代 semaphore
// return await withCheckedContinuation { continuation in
//
// PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
// if let image = image, let self = self {
// let tempHash = self.calculateImageHash(image)
//
// // 保存到缓存
// Task {
// await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
// }
//
// continuation.resume(returning: tempHash)
// } else {
// continuation.resume(returning: nil)
// }
// }
// }
// }
// Build asset models
// private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
// return await withTaskGroup(of: AssetModel.self) { modelGroup in
// var models: [AssetModel] = []
//
// for asset in assets {
// modelGroup.addTask {
// return await withCheckedContinuation { continuation in
// let options = PHImageRequestOptions()
// options.isSynchronous = false
// options.version = .original
// options.isNetworkAccessAllowed = false
// PHImageManager.default().requestImageDataAndOrientation(for: asset, options: options) { data, _, _, _ in
// let assetSize = Double(data?.count ?? 0)
// let model = AssetModel(
// localIdentifier: asset.localIdentifier,
// assetSize: assetSize,
// createDate: asset.creationDate ?? Date(),
// mediaType: 1
// )
// continuation.resume(returning: model)
// }
// }
// }
// }
//
// for await model in modelGroup {
// models.append(model)
// }
// return models
// }
// }
}
//
// ScreenShotSimilarManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import Photos
import UIKit
@MainActor
class ScreenshotSimilarJSONManager: @unchecked Sendable {
static let shared = ScreenshotSimilarJSONManager()
private let stateManager = ScreenshotSimilarStateManager() // use the screenshot-specific state actor defined in ActorManager.swift
private init() {}
// MARK: - Configuration
private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
private let fileSizeThreshold: Double = 0.01 // file-size difference threshold (1%)
private let resolutionThreshold: Double = 0.01 // resolution difference threshold (1%)
// File paths
private var timeGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("screenshotTimeGroups.json").path
}
private var similarGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("screenshotSimilarGroups.json").path
}
private var currentTask: Task<Void, Error>?
// UserDefaults key for the latest screenshot timestamp
private let latestPhotoTimeKey = "screenshotLatestPhotoTimestamp"
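// findSimilarAssets(in:mediaType:progressHandler:completionHandler:) below mirrors the
// pipeline in PhotoSimilarManager (time-window grouping, size pre-grouping, hash-based
// K-Means clustering) but uses tighter 1% size/resolution thresholds and its own JSON
// files and timestamp key for screenshots.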
func findSimilarAssets(in assets: [PHAsset],
mediaType: MediaType = .photo,
progressHandler: (([AssetModel]) -> Void)?,
completionHandler: (([[AssetModel]]) -> Void)?) {
Task {
// 1. Load locally stored data
await loadStoredData()
print("Local data loaded")
// 2. Read the latest asset timestamp recorded on the previous run
var lastLatestTime = UserDefaults.standard.double(forKey: latestPhotoTimeKey)
if lastLatestTime == 0{
// Nothing stored yet, so this is the first run; use the newest asset's time
lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
}
// 3. Report the already-cached results
let cachedGroups = await stateManager.getAllSimilarGroups()
print("Reporting cached results", cachedGroups.count)
await MainActor.run {
for group in cachedGroups {
progressHandler?(group.assets)
}
}
// 4. Time-window grouping
// Assets newer than the last recorded timestamp
let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
// Assets at or before the last recorded timestamp
let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
// Record the newest asset's timestamp
if let latestAsset = assets.first {
let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
UserDefaults.standard.set(latestTime, forKey: latestPhotoTimeKey)
UserDefaults.standard.synchronize()
}
let newtimeGroup = groupAssetsByTimeWindow(newAssets)
let oldGroups = groupAssetsByTimeWindow(oldAssets)
let timeGroups = newtimeGroup + oldGroups
var unprocessedGroups: [[PHAsset]] = []
// Time groups that have already been processed
let processedTimeGroups = await stateManager.getAllTimeGroups()
// 5. Process the new screenshots, skipping time groups that were already handled
for group in timeGroups {
if let firstAsset = group.first,
let lastAsset = group.last,
let firstDate = firstAsset.creationDate,
let lastDate = lastAsset.creationDate {
// Check whether this time group was already processed
let isProcessed = processedTimeGroups.contains { timeGroup in
return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
timeGroup.isProcessed
}
if !isProcessed {
unprocessedGroups.append(group)
}
}
}
print("开始处理分组,分组资源为:",unprocessedGroups.count)
let maxConcurrency = 3 // 最大并发数
let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
if unprocessedGroups.count == 0{
let total = cachedGroups.compactMap{$0.assets}
completionHandler?(total)
return
}
for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
await withTaskGroup(of: Void.self) { group in
for unGroup in batch {
group.addTask { [weak self] in
// Task body
guard let self = self else {
print("self is nil, exiting the task early")
return
}
// 6.1 Pre-group by file size
let sizeGroups = self.groupAssetsBySize(unGroup)
// 6.2 Process each size group
for sizeGroup in sizeGroups {
let similarGroups = await self.findSimilarInGroupUsingKMeans(sizeGroup)
if !similarGroups.isEmpty {
for similarGroup in similarGroups {
let groupId = UUID().uuidString
// 6.3 Build asset models
let assetModels = await createAssetModels(from: similarGroup)
// 6.4 Report progress
await MainActor.run {
progressHandler?(assetModels)
}
// 6.5 Save the similar group
await stateManager.appendSimilarGroup(SimilarGroupModel(groupId: groupId, assets: assetModels))
if await stateManager.shouldSavePendingGroups() {
await savePendingSimilarGroups()
}
}
}
}
// 6.6 Mark this time group as processed
if let firstDate = unGroup.first?.creationDate,
let lastDate = unGroup.last?.creationDate {
let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
let timeGroup = TimeGroupModel(
groupId: groupId,
startTime: firstDate.timeIntervalSince1970,
endTime: lastDate.timeIntervalSince1970,
isProcessed: true
)
await self.saveTimeGroup(timeGroup)
}
}
}
}
}
// 7. Finish up
if await !stateManager.getpendingSimilarGroups().isEmpty {
await savePendingSimilarGroups()
}
let allGroups = await stateManager.getAllSimilarGroups()
await MainActor.run {
print("执行完毕")
completionHandler?(allGroups.map { $0.assets })
}
}
}
// Build asset models
// private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
// return await withTaskGroup(of: AssetModel.self) { modelGroup in
// var models: [AssetModel] = []
//
// for asset in assets {
// modelGroup.addTask {
// return await withCheckedContinuation { continuation in
// let options = PHImageRequestOptions()
// options.isSynchronous = false
// options.version = .original
// options.isNetworkAccessAllowed = false
// PHImageManager.default().requestImageDataAndOrientation(for: asset, options: options) { data, _, _, _ in
// let assetSize = Double(data?.count ?? 0)
// let model = AssetModel(
// localIdentifier: asset.localIdentifier,
// assetSize: assetSize,
// createDate: asset.creationDate ?? Date(),
// mediaType: 1
// )
// continuation.resume(returning: model)
// }
// }
// }
// }
//
// for await model in modelGroup {
// models.append(model)
// }
// return models
// }
// }
private func createAssetModels(from assets: [PHAsset]) async -> [AssetModel] {
return await withTaskGroup(of: AssetModel.self) { modelGroup in
var models: [AssetModel] = []
for asset in assets {
modelGroup.addTask {
return await withCheckedContinuation { continuation in
let assetSize: Double
if let resource = PHAssetResource.assetResources(for: asset).first,
let size = resource.value(forKey: "fileSize") as? Int64 {
assetSize = Double(size)
} else {
assetSize = 0
}
let model = AssetModel(
localIdentifier: asset.localIdentifier,
assetSize: assetSize,
createDate: asset.creationDate ?? Date(),
mediaType: 1
)
continuation.resume(returning: model)
}
}
}
for await model in modelGroup {
models.append(model)
}
return models
}
}
}
// MARK: - Grouping helpers
extension ScreenshotSimilarJSONManager{
nonisolated private func groupAssetsByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
// Sort by time, descending (newest first)
let sortedAssets = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
var timeGroups: [[PHAsset]] = []
var currentGroup: [PHAsset] = []
if let firstAsset = sortedAssets.first {
var groupStartTime = firstAsset.creationDate ?? Date()
for asset in sortedAssets {
let currentTime = asset.creationDate ?? Date()
// Time difference (descending order, so subtract currentTime from groupStartTime)
let timeDiff = groupStartTime.timeIntervalSince(currentTime)
// If the gap exceeds the window, start a new group
if timeDiff > timeWindowInSeconds {
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
// Start a new group, using this asset's time as the new start time
currentGroup = []
groupStartTime = currentTime
}
currentGroup.append(asset)
}
// Handle the final group
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
}
return timeGroups
}
nonisolated private func groupAssetsBySize(_ assets: [PHAsset]) -> [[PHAsset]] {
var sizeGroups: [[PHAsset]] = []
var processedAssets = Set<String>()
for asset in assets {
if processedAssets.contains(asset.localIdentifier) {
continue
}
var currentGroup = [asset]
processedAssets.insert(asset.localIdentifier)
// Find assets of similar size
for compareAsset in assets {
if processedAssets.contains(compareAsset.localIdentifier) {
continue
}
if isFileSizeSimilar(asset, compareAsset) {
currentGroup.append(compareAsset)
processedAssets.insert(compareAsset.localIdentifier)
}
}
if currentGroup.count > 1 {
sizeGroups.append(currentGroup)
}
}
return sizeGroups
}
// Size comparison (note: this compares pixel area, not byte size, despite the name)
nonisolated private func isFileSizeSimilar(_ asset1: PHAsset, _ asset2: PHAsset) -> Bool {
let size1 = Double(asset1.pixelWidth * asset1.pixelHeight)
let size2 = Double(asset2.pixelWidth * asset2.pixelHeight)
let ratio = abs(size1 - size2) / max(size1, size2)
return ratio <= fileSizeThreshold
}
}
// MARK: - Persistence
extension ScreenshotSimilarJSONManager{
// Load locally stored data
private func loadStoredData() async {
var loadedTimeGroups: [TimeGroupModel] = []
var loadedSimilarGroups: [SimilarGroupModel] = []
// Load time-group data
if let data = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
let groups = try? JSONDecoder().decode([TimeGroupModel].self, from: data) {
loadedTimeGroups = groups
}
// Load similar-group data
if let data = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
let groups = try? JSONDecoder().decode([SimilarGroupModel].self, from: data) {
loadedSimilarGroups = groups
}
await stateManager.loadStoredData(timeGroups: loadedTimeGroups, similarGroups: loadedSimilarGroups)
}
// Save a time group
private func saveTimeGroup(_ group: TimeGroupModel) async {
await stateManager.appendTimeGroup(group)
// Persist to disk
if let data = try? JSONEncoder().encode(await stateManager.getAllTimeGroups()) {
try? data.write(to: URL(fileURLWithPath: timeGroupsPath))
}
}
private func savePendingSimilarGroups() async {
await stateManager.savePendingGroups()
// Persist to disk
if let data = try? JSONEncoder().encode(await stateManager.getAllSimilarGroups()) {
try? data.write(to: URL(fileURLWithPath: similarGroupsPath))
}
}
private func loadSimilarGroups() async -> [SimilarGroupModel] {
let groups = await stateManager.getAllSimilarGroups()
// Drop assets that no longer exist in the library
return groups.map { group in
var validAssets = group.assets
validAssets.removeAll { asset in
let fetchResult = PHAsset.fetchAssets(withLocalIdentifiers: [asset.localIdentifier], options: nil)
return fetchResult.firstObject == nil
}
return SimilarGroupModel(groupId: group.groupId, assets: validAssets)
}.filter { !$0.assets.isEmpty }
}
}
// MARK: - pHash computation
extension ScreenshotSimilarJSONManager{
// Get or compute an image hash
private func getOrCalculateHash(for asset: PHAsset) async -> String? {
if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
print("返回缓存cachedHash")
return cachedHash
}
if let cachedImage = await stateManager.getCachedImage(for: asset.localIdentifier) {
let hash = calculateImageHash(cachedImage)
print("返回缓存hash")
await stateManager.setCachedHash(hash, for: asset.localIdentifier)
return hash
}
let options = PHImageRequestOptions()
options.version = .original
let targetSize = CGSize(width: 32, height: 32)
return await withCheckedContinuation { continuation in
PHImageManager.default().requestImage(for: asset, targetSize: targetSize, contentMode: .aspectFit, options: options) { [weak self] image, _ in
if let image = image, let self = self {
let tempHash = self.calculateImageHash(image)
Task {
await self.stateManager.setCachedImage(image, for: asset.localIdentifier)
await self.stateManager.setCachedHash(tempHash, for: asset.localIdentifier)
}
continuation.resume(returning: tempHash)
} else {
continuation.resume(returning: nil)
}
}
}
}
// Compute the image hash (average hash)
private func calculateImageHash(_ image: UIImage) -> String {
guard let cgImage = image.cgImage else { return "" }
let ciImage = CIImage(cgImage: cgImage)
// Set the input image before reading outputImage; reading it first always yields nil
guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
filter.setValue(ciImage, forKey: kCIInputImageKey)
guard let outputImage = filter.outputImage else { return "" }
let context = CIContext()
guard let scaledImage = context.createCGImage(outputImage, from: outputImage.extent),
let pixelData = UIImage(cgImage: scaledImage).cgImage?.dataProvider?.data,
let data = CFDataGetBytePtr(pixelData) else {
return ""
}
var pixels = Array(repeating: UInt8(0), count: 1024)
for i in 0..<32 {
for j in 0..<32 {
let pixelIndex = (i * 32 + j) * 4
let gray = UInt8(
0.299 * Double(data[pixelIndex]) +
0.587 * Double(data[pixelIndex + 1]) +
0.114 * Double(data[pixelIndex + 2])
)
pixels[i * 32 + j] = gray
}
}
let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
return pixels.map { $0 > average ? "1" : "0" }.joined()
}
// Hamming distance between two hash strings
private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
guard hash1.count == hash2.count else { return Int.max }
return zip(hash1, hash2).filter { $0 != $1 }.count
}
func cancelCurrentOperation() {
currentTask?.cancel()
}
}
// MARK: - Similarity clustering
extension ScreenshotSimilarJSONManager{
// K-Means clustering
func kMeansClustering(data: [[Double]], k: Int, maxIterations: Int = 100) -> [Int] {
guard data.count > 0 && k > 0 && k <= data.count else {
return []
}
var centroids = (0..<k).map { _ in data.randomElement()! }
var labels = Array(repeating: 0, count: data.count)
for _ in 0..<maxIterations {
var newCentroids = Array(repeating: Array(repeating: 0.0, count: data[0].count), count: k)
var clusterCounts = Array(repeating: 0, count: k)
// Assign each data point to the nearest centroid
for (i, point) in data.enumerated() {
var minDistance = Double.infinity
var closestCentroidIndex = 0
for (j, centroid) in centroids.enumerated() {
let distance = euclideanDistance(point, centroid)
// Only accept a centroid closer than 0.3; points farther than 0.3 from
// every centroid keep the default label 0
if distance < minDistance && distance < 0.3 {
minDistance = distance
closestCentroidIndex = j
}
}
labels[i] = closestCentroidIndex
newCentroids[closestCentroidIndex] = newCentroids[closestCentroidIndex].enumerated().map { index, value in
value + point[index]
}
clusterCounts[closestCentroidIndex] += 1
}
// Update the centroids
var hasChanged = false
for i in 0..<k {
if clusterCounts[i] > 0 {
let newCentroid = newCentroids[i].enumerated().map { index, value in
value / Double(clusterCounts[i])
}
if newCentroid != centroids[i] {
hasChanged = true
centroids[i] = newCentroid
}
}
}
// Stop early once no centroid changes
if !hasChanged {
break
}
}
return labels
}
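// Illustrative usage sketch (assumption, not part of the original commit): cluster
// four 2-D points into two groups. Because of the 0.3 distance cutoff above, points
// are only pulled toward a centroid that is very close; widely separated points
// keep the default label 0.
// let points: [[Double]] = [[0.0, 0.0], [0.05, 0.0], [0.9, 1.0], [1.0, 0.95]]
// let labels = kMeansClustering(data: points, k: 2) // one label per input point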
// Euclidean distance
func euclideanDistance(_ point1: [Double], _ point2: [Double]) -> Double {
let squaredSum = zip(point1, point2).map { pow($0 - $1, 2) }.reduce(0, +)
return sqrt(squaredSum)
}
// Convert a hash string into a numeric vector (one component per bit)
func hashToVector(_ hash: String) -> [Double] {
return hash.map { $0 == "1" ? 1.0 : 0.0 }
}
private func findSimilarInGroupUsingKMeans(_ assets: [PHAsset]) async -> [[PHAsset]] {
// Compute hashes concurrently, keeping each hash paired with its asset so that
// vector indices stay aligned with assets even when some hashes fail
let pairs = await withTaskGroup(of: (PHAsset, String?).self) { group in
for asset in assets {
group.addTask {
return (asset, await self.getOrCalculateHash(for: asset))
}
}
var result: [(asset: PHAsset, hash: String)] = []
for await (asset, hash) in group {
if let hash = hash {
result.append((asset: asset, hash: hash))
}
}
return result
}
guard !pairs.isEmpty else { return [] }
// Convert hash strings into numeric vectors
let vectors = pairs.map { hashToVector($0.hash) }
// Run K-Means clustering, assuming at most 10 clusters
let k = min(pairs.count, 10)
let labels = kMeansClustering(data: vectors, k: k)
guard labels.count == pairs.count else { return [] }
// Group assets by cluster label
var clusters: [[PHAsset]] = Array(repeating: [], count: k)
for (i, label) in labels.enumerated() {
clusters[label].append(pairs[i].asset)
}
// Drop clusters that contain only a single asset
return clusters.filter { $0.count > 1 }
}
}
//
// VideoSimilarManager.swift
// CleanPhoto
//
// Created by edy on 2025/5/9.
//
import Foundation
import Photos
import UIKit
import AVFoundation
// MARK: - Caching
private struct VideoAssetCache {
let avAsset: AVAsset
let size: Int64
let frameRate: Double
let firstFrame: UIImage?
}
private actor VideoAssetCacheManager {
private var cache: [String: VideoAssetCache] = [:]
private let maxCacheSize = 50 // Maximum number of cached entries
func getCache(for identifier: String) -> VideoAssetCache? {
return cache[identifier]
}
func setCache(_ videoCache: VideoAssetCache, for identifier: String) {
// Evict an entry when the cache is full (Dictionary keys are unordered,
// so this removes an arbitrary entry rather than strictly the oldest one)
if cache.count >= maxCacheSize {
let oldestKey = cache.keys.first
if let key = oldestKey {
cache.removeValue(forKey: key)
}
}
cache[identifier] = videoCache
}
func clearCache() {
cache.removeAll()
}
}
@MainActor
class VideoSimilarJSONManager: @unchecked Sendable {
static let shared = VideoSimilarJSONManager()
private let stateManager = VideoSimilarStateManager()
private init() {}
// Cache manager instance
private let assetCacheManager = VideoAssetCacheManager()
// MARK: - Configuration
private let timeWindowInSeconds: TimeInterval = 600 // 10-minute time window
private let durationThreshold: Double = 0.1 // Max relative duration difference (10%)
private let resolutionThreshold: Double = 0.2 // Max relative resolution difference (20%)
private let frameRateThreshold: Double = 0.1 // Max relative frame-rate difference (10%)
private let hashDistanceThreshold: Int = 10 // Hamming distance threshold for first-frame hashes
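// Worked example of the gates above (illustrative): two clips of 10.0 s and 10.8 s
// give |10.8 - 10.0| / 10.8 ≈ 0.074 < 0.1, so they pass the duration check, while
// 10.0 s vs 12.0 s gives 2.0 / 12.0 ≈ 0.167 > 0.1 and is rejected before any
// frame hashing happens.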
// File paths
private var timeGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("VideoTimeGroups.json").path
}
private var similarGroupsPath: String {
let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
return paths[0].appendingPathComponent("VideoSimilarGroups.json").path
}
private var currentTask: Task<Void, Error>?
private let latestVideoTimeKey = "LatestVideoTimestamp"
// MARK: - Main processing
func findSimilarVideos(in assets: [PHAsset],
progressHandler: (([AssetModel]) -> Void)?,
completionHandler: (([[AssetModel]]) -> Void)?) {
Task {
// 1. Load locally stored data
await loadStoredData()
// 2. Read the most recent asset timestamp recorded last time
var lastLatestTime = UserDefaults.standard.double(forKey: latestVideoTimeKey)
if lastLatestTime == 0 {
lastLatestTime = assets.first?.creationDate?.timeIntervalSince1970 ?? 0
}
// 3. Report already-cached results first
let cachedGroups = await stateManager.getAllSimilarGroups()
await MainActor.run {
for group in cachedGroups {
progressHandler?(group.assets)
}
}
// 4. Time-window grouping
// Assets created after the last recorded timestamp
let newAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 > lastLatestTime}
// Assets created at or before the last recorded timestamp
let oldAssets = assets.filter{$0.creationDate?.timeIntervalSince1970 ?? 0 <= lastLatestTime}
// Update the recorded timestamp to the newest asset
if let latestAsset = assets.first {
let latestTime = latestAsset.creationDate?.timeIntervalSince1970 ?? 0
UserDefaults.standard.set(latestTime, forKey: latestVideoTimeKey)
UserDefaults.standard.synchronize()
}
let newtimeGroup = groupVideosByTimeWindow(newAssets)
let oldGroups = groupVideosByTimeWindow(oldAssets)
let timeGroups = newtimeGroup + oldGroups
var unprocessedGroups: [[PHAsset]] = []
let processedTimeGroups = await stateManager.getAllTimeGroups()
// 5. Skip time groups that were already processed
for group in timeGroups {
if let firstAsset = group.first,
let lastAsset = group.last,
let firstDate = firstAsset.creationDate,
let lastDate = lastAsset.creationDate {
let isProcessed = processedTimeGroups.contains { timeGroup in
return timeGroup.startTime <= firstDate.timeIntervalSince1970 &&
timeGroup.endTime >= lastDate.timeIntervalSince1970 &&
timeGroup.isProcessed
}
if !isProcessed {
unprocessedGroups.append(group)
}
}
}
// 6. Process the remaining groups concurrently
let maxConcurrency = 4 // Video processing is heavy, so keep concurrency low
let batchSize = max(1, unprocessedGroups.count / maxConcurrency)
if unprocessedGroups.isEmpty {
let total = cachedGroups.map { $0.assets }
completionHandler?(total)
return
}
for batchIndex in stride(from: 0, to: unprocessedGroups.count, by: batchSize) {
let batch = Array(unprocessedGroups[batchIndex..<min(batchIndex + batchSize, unprocessedGroups.count)])
await withTaskGroup(of: Void.self) { group in
autoreleasepool {
for unGroup in batch {
group.addTask { [weak self] in
guard let self = self else { return }
// 6.1 Pre-group videos by coarse features
let preGroups = await self.groupVideosByFeatures(unGroup)
// 6.2 Process each pre-group
for preGroup in preGroups {
let similarGroups = await self.findSimilarInGroupByFirstFrame(preGroup)
// 6.3 For each similar group found
for similarGroup in similarGroups {
let groupId = UUID().uuidString
let assetModels = await self.createAssetModels(from: similarGroup)
// 6.4 Report progress
await MainActor.run {
progressHandler?(assetModels)
}
// 6.5 Persist the similar group
await self.stateManager.appendSimilarGroup(
SimilarGroupModel(groupId: groupId, assets: assetModels)
)
if await self.stateManager.shouldSavePendingGroups() {
await self.savePendingSimilarGroups()
}
}
}
// 6.6 Mark the time group as processed
if let firstDate = unGroup.first?.creationDate,
let lastDate = unGroup.last?.creationDate {
let groupId = "\(Int(firstDate.timeIntervalSince1970))_\(Int(lastDate.timeIntervalSince1970))"
let timeGroup = TimeGroupModel(
groupId: groupId,
startTime: firstDate.timeIntervalSince1970,
endTime: lastDate.timeIntervalSince1970,
isProcessed: true
)
await self.saveTimeGroup(timeGroup)
}
}
}
}
}
}
// 7. Finish up
if await !stateManager.getpendingSimilarGroups().isEmpty {
await savePendingSimilarGroups()
}
let allGroups = await stateManager.getAllSimilarGroups()
await MainActor.run {
completionHandler?(allGroups.map { $0.assets })
}
}
}
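// Illustrative call-site sketch (hypothetical helper, not part of the original commit):
// fetch every video in the library and hand it to the manager, logging progress and
// the final number of similar groups.
func exampleScanAllVideos() {
let fetchResult = PHAsset.fetchAssets(with: .video, options: nil)
var videos: [PHAsset] = []
fetchResult.enumerateObjects { asset, _, _ in videos.append(asset) }
findSimilarVideos(in: videos,
progressHandler: { group in print("Found a similar group with \(group.count) videos") },
completionHandler: { allGroups in print("Finished with \(allGroups.count) similar groups") })
}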
// MARK: - Feature-based pre-grouping
private func groupVideosByFeatures(_ assets: [PHAsset]) async -> [[PHAsset]] {
var featureGroups: [[PHAsset]] = []
var processedAssets = Set<String>()
for asset in assets {
if processedAssets.contains(asset.localIdentifier) {
continue
}
var currentGroup = [asset]
processedAssets.insert(asset.localIdentifier)
let assetDuration = asset.duration
let assetSize = Double(asset.pixelWidth * asset.pixelHeight)
// Fetch the video frame rate
let assetFrameRate = await getVideoFrameRate(asset)
for compareAsset in assets {
if processedAssets.contains(compareAsset.localIdentifier) {
continue
}
// Compare durations
let durationRatio = abs(compareAsset.duration - assetDuration) / max(compareAsset.duration, assetDuration)
if durationRatio > durationThreshold {
continue
}
// Compare resolutions
let compareSize = Double(compareAsset.pixelWidth * compareAsset.pixelHeight)
let sizeRatio = abs(compareSize - assetSize) / max(compareSize, assetSize)
if sizeRatio > resolutionThreshold {
continue
}
// Compare frame rates
let compareFrameRate = await getVideoFrameRate(compareAsset)
let frameRateRatio = abs(compareFrameRate - assetFrameRate) / max(compareFrameRate, assetFrameRate)
if frameRateRatio > frameRateThreshold {
continue
}
currentGroup.append(compareAsset)
processedAssets.insert(compareAsset.localIdentifier)
}
if currentGroup.count > 1 {
featureGroups.append(currentGroup)
}
}
return featureGroups
}
private func getVideoFrameRate(_ asset: PHAsset) async -> Double {
// Check the cache first
if let cache = await assetCacheManager.getCache(for: asset.localIdentifier) {
return cache.frameRate
}
return await withCheckedContinuation { continuation in
let options = PHVideoRequestOptions()
options.version = .original
options.deliveryMode = .fastFormat
options.isNetworkAccessAllowed = false
// Pass the configured options to the request
PHImageManager.default().requestAVAsset(forVideo: asset, options: options) { [weak self] avAsset, _, _ in
guard let self = self,
let videoAsset = avAsset,
let track = videoAsset.tracks(withMediaType: .video).first else {
continuation.resume(returning: 0.0)
return
}
// Extract the first frame
let generator = AVAssetImageGenerator(asset: videoAsset)
generator.appliesPreferredTrackTransform = true
generator.maximumSize = CGSize(width: 640, height: 640)
var firstFrame: UIImage?
do {
let cgImage = try generator.copyCGImage(at: .zero, actualTime: nil)
firstFrame = UIImage(cgImage: cgImage)
} catch {
print("获取第一帧失败:", error)
}
// Build and store the cache entry
let frameRate = Double(track.nominalFrameRate)
let cache = VideoAssetCache(
avAsset: videoAsset,
size: 0, // File size could be filled in here if needed
frameRate: frameRate,
firstFrame: firstFrame
)
Task {
await self.assetCacheManager.setCache(cache, for: asset.localIdentifier)
}
continuation.resume(returning: frameRate)
}
}
}
private func getFirstFrame(for asset: PHAsset) async -> UIImage? {
// Check the cache first
if let cache = await assetCacheManager.getCache(for: asset.localIdentifier) {
return cache.firstFrame
}
// If nothing is cached yet, getVideoFrameRate populates the VideoAssetCache as a side effect
_ = await getVideoFrameRate(asset)
// Check the cache again
if let cache = await assetCacheManager.getCache(for: asset.localIdentifier) {
return cache.firstFrame
}
return nil
}
// MARK: - First-frame similarity
private func findSimilarInGroupByFirstFrame(_ assets: [PHAsset]) async -> [[PHAsset]] {
// 1. Hash the first frame of every video
var assetHashes: [(asset: PHAsset, hash: String)] = []
for asset in assets {
if let hash = await getFirstFrameHash(for: asset) {
assetHashes.append((asset, hash))
}
}
guard !assetHashes.isEmpty else { return [] }
// 2. Convert hash strings into feature vectors
let vectors = assetHashes.map { hashToVector($0.hash) }
// 3. Run K-Means clustering
let k = min(vectors.count / 2, max(2, Int(sqrt(Double(vectors.count)))))
let clusters = kMeansClustering(vectors: vectors, k: k)
// 4. Convert clusters back into groups of assets
var similarGroups: [[PHAsset]] = []
for cluster in clusters {
// Each cluster holds indices into assetHashes, so map the stored indices
let groupAssets = cluster.map { assetHashes[$0].asset }
if groupAssets.count > 1 {
similarGroups.append(groupAssets)
}
}
return similarGroups
}
// Convert a hash string into a feature vector
private func hashToVector(_ hash: String) -> [Double] {
var vector: [Double] = []
let chunkSize = 8
// Split the binary hash string into 8-bit chunks and convert each chunk to a number
for i in stride(from: 0, to: hash.count, by: chunkSize) {
let endIndex = min(i + chunkSize, hash.count)
let start = hash.index(hash.startIndex, offsetBy: i)
let end = hash.index(hash.startIndex, offsetBy: endIndex)
let chunk = String(hash[start..<end])
if let value = Int(chunk, radix: 2) {
vector.append(Double(value))
}
}
return vector
}
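// Example (illustrative): the 16-bit hash "1010000011110000" is split into
// "10100000" and "11110000", giving the vector [160.0, 240.0].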
// K-Means clustering implementation (returns index lists per cluster)
private func kMeansClustering(vectors: [[Double]], k: Int) -> [[Int]] {
guard vectors.count >= k else { return [Array(0..<vectors.count)] }
// 1. Pick random initial centroids
var centroids = (0..<k).map { _ in vectors[Int.random(in: 0..<vectors.count)] }
var clusters: [[Int]] = Array(repeating: [], count: k)
var previousClusters: [[Int]] = []
// 2. Iterate until convergence or the maximum iteration count
let maxIterations = 100
var iteration = 0
while iteration < maxIterations {
// Reset the current clusters
clusters = Array(repeating: [], count: k)
// 3. Assign each point to the nearest centroid
for (index, vector) in vectors.enumerated() {
var minDistance = Double.infinity
var closestCentroid = 0
for (centroidIndex, centroid) in centroids.enumerated() {
let distance = euclideanDistance(vector, centroid)
if distance < minDistance {
minDistance = distance
closestCentroid = centroidIndex
}
}
clusters[closestCentroid].append(index)
}
// 4. Check for convergence
if clusters == previousClusters {
break
}
// 5. Update the centroids
for i in 0..<k {
guard !clusters[i].isEmpty else { continue }
let clusterVectors = clusters[i].map { vectors[$0] }
centroids[i] = calculateMean(clusterVectors)
}
previousClusters = clusters
iteration += 1
}
return clusters
}
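// Illustrative note (assumption, not part of the original commit): unlike the
// screenshot variant, this version returns index lists per cluster. For example,
// kMeansClustering(vectors: [[0.0, 0.0], [0.0, 1.0], [5.0, 5.0]], k: 2) might
// return [[0, 1], [2]], i.e. indices into the input array grouped by cluster.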
// Euclidean distance
private func euclideanDistance(_ v1: [Double], _ v2: [Double]) -> Double {
guard v1.count == v2.count else { return Double.infinity }
let sum = zip(v1, v2).map { pow($0 - $1, 2) }.reduce(0, +)
return sqrt(sum)
}
// Mean of a set of vectors
private func calculateMean(_ vectors: [[Double]]) -> [Double] {
guard !vectors.isEmpty else { return [] }
let count = Double(vectors.count)
var mean = Array(repeating: 0.0, count: vectors[0].count)
for vector in vectors {
for (i, value) in vector.enumerated() {
mean[i] += value / count
}
}
return mean
}
// MARK: - First-frame hash
private func getFirstFrameHash(for asset: PHAsset) async -> String? {
// Check the cache first
if let cachedHash = await stateManager.getCachedHash(for: asset.localIdentifier) {
return cachedHash
}
// Get the first-frame image
let image = await getFirstFrame(for: asset)
guard let image = image else { return nil }
// Compute and cache the hash
let hash = calculateImageHash(image)
await stateManager.setCachedHash(hash, for: asset.localIdentifier)
await stateManager.setCachedImage(image, for: asset.localIdentifier)
return hash
}
// MARK: - Helpers
private func groupVideosByTimeWindow(_ assets: [PHAsset]) -> [[PHAsset]] {
// Sort by creation date in descending order (newest first)
let sortedAssets = assets.sorted { ($0.creationDate ?? Date()) > ($1.creationDate ?? Date()) }
var timeGroups: [[PHAsset]] = []
var currentGroup: [PHAsset] = []
if let firstAsset = sortedAssets.first {
var groupStartTime = firstAsset.creationDate ?? Date()
for asset in sortedAssets {
let currentTime = asset.creationDate ?? Date()
// Compute the gap (descending order, so subtract currentTime from groupStartTime)
let timeDiff = groupStartTime.timeIntervalSince(currentTime)
// Start a new group once the gap exceeds the window
if timeDiff > timeWindowInSeconds {
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
// Start a new group anchored at the current asset's time
currentGroup = []
groupStartTime = currentTime
}
currentGroup.append(asset)
}
// Handle the final group
if currentGroup.count > 1 {
timeGroups.append(currentGroup)
}
}
return timeGroups
}
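// Worked example (illustrative): with a 600 s window, clips shot at 12:20, 12:05
// and 12:00 produce one group {12:05, 12:00} (gap 300 s); the 12:20 clip is more
// than 600 s away from the next one, ends up alone and is discarded because
// single-element groups are dropped.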
// Compute a 1024-bit average hash from a 32x32 grayscale rendering
private func calculateImageHash(_ image: UIImage) -> String {
// Redraw into a fixed 32x32 bitmap so the pixel loop below always reads exactly 1024 pixels
let size = CGSize(width: 32, height: 32)
UIGraphicsBeginImageContextWithOptions(size, true, 1.0)
image.draw(in: CGRect(origin: .zero, size: size))
let resizedImage = UIGraphicsGetImageFromCurrentImageContext()
UIGraphicsEndImageContext()
guard let cgImage = resizedImage?.cgImage else { return "" }
let ciImage = CIImage(cgImage: cgImage)
// Desaturate with the noir filter; the input must be set before reading the output,
// otherwise outputImage is always nil
guard let filter = CIFilter(name: "CIPhotoEffectNoir") else { return "" }
filter.setValue(ciImage, forKey: kCIInputImageKey)
guard let outputImage = filter.outputImage else { return "" }
let context = CIContext()
guard let scaledImage = context.createCGImage(outputImage, from: outputImage.extent),
let pixelData = UIImage(cgImage: scaledImage).cgImage?.dataProvider?.data,
let data = CFDataGetBytePtr(pixelData) else {
return ""
}
var pixels = Array(repeating: UInt8(0), count: 1024)
for i in 0..<32 {
for j in 0..<32 {
let pixelIndex = (i * 32 + j) * 4
let gray = UInt8(
0.299 * Double(data[pixelIndex]) +
0.587 * Double(data[pixelIndex + 1]) +
0.114 * Double(data[pixelIndex + 2])
)
pixels[i * 32 + j] = gray
}
}
let average = UInt8(pixels.reduce(0, { UInt32($0) + UInt32($1) }) / UInt32(pixels.count))
return pixels.map { $0 > average ? "1" : "0" }.joined()
}
private func calculateHammingDistance(_ hash1: String, _ hash2: String) -> Int {
guard hash1.count == hash2.count else { return Int.max }
return zip(hash1, hash2).filter { $0 != $1 }.count
}
private func createAssetModels(from assets: [PHAsset]) -> [AssetModel] {
return assets.map { asset in
let assetSize: Double
if let resource = PHAssetResource.assetResources(for: asset).first,
let size = resource.value(forKey: "fileSize") as? Int64 {
assetSize = Double(size)
} else {
assetSize = 0
}
return AssetModel(
localIdentifier: asset.localIdentifier,
assetSize: assetSize,
createDate: asset.creationDate ?? Date(),
mediaType: 2
)
}
}
}
// MARK: - Persistence
extension VideoSimilarJSONManager {
private func loadStoredData() async {
var loadedTimeGroups: [TimeGroupModel] = []
var loadedSimilarGroups: [SimilarGroupModel] = []
if let data = try? Data(contentsOf: URL(fileURLWithPath: timeGroupsPath)),
let groups = try? JSONDecoder().decode([TimeGroupModel].self, from: data) {
loadedTimeGroups = groups
}
if let data = try? Data(contentsOf: URL(fileURLWithPath: similarGroupsPath)),
let groups = try? JSONDecoder().decode([SimilarGroupModel].self, from: data) {
loadedSimilarGroups = groups
}
await stateManager.loadStoredData(timeGroups: loadedTimeGroups, similarGroups: loadedSimilarGroups)
}
private func saveTimeGroup(_ group: TimeGroupModel) async {
await stateManager.appendTimeGroup(group)
if let data = try? JSONEncoder().encode(await stateManager.getAllTimeGroups()) {
try? data.write(to: URL(fileURLWithPath: timeGroupsPath))
}
}
private func savePendingSimilarGroups() async {
await stateManager.savePendingGroups()
if let data = try? JSONEncoder().encode(await stateManager.getAllSimilarGroups()) {
try? data.write(to: URL(fileURLWithPath: similarGroupsPath))
}
}
}
// private func getFirstFrame(for asset: PHAsset) async -> UIImage? {
// return await withCheckedContinuation { continuation in
// let options = PHVideoRequestOptions()
// options.version = .original
// options.deliveryMode = .fastFormat
// options.isNetworkAccessAllowed = false
//
// // Copy the options configuration into the closure
// PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
// guard let videoAsset = avAsset else {
// continuation.resume(returning: nil)
// return
// }
//
// let generator = AVAssetImageGenerator(asset: videoAsset)
// generator.appliesPreferredTrackTransform = true
// generator.maximumSize = CGSize(width: 640, height: 640)
//
// do {
// let cgImage = try generator.copyCGImage(at: .zero, actualTime: nil)
// let image = UIImage(cgImage: cgImage)
// continuation.resume(returning: image)
// } catch {
// continuation.resume(returning: nil)
// }
// }
// }
// }
// private func getVideoFrameRate(_ asset: PHAsset) async -> Double {
// return await withCheckedContinuation { continuation in
// let options = PHVideoRequestOptions()
// options.version = .original
// options.deliveryMode = .fastFormat
// options.isNetworkAccessAllowed = false
//
// PHImageManager.default().requestAVAsset(forVideo: asset, options: nil) { avAsset, _, _ in
// if let videoAsset = avAsset, let track = videoAsset.tracks(withMediaType: .video).first {
// continuation.resume(returning: Double(track.nominalFrameRate))
// } else {
// continuation.resume(returning: 0.0)
// }
// }
// }
// }
@@ -358,28 +358,28 @@ class HomePhotosModel:Codable {
 }
-class AssetModel :Codable,Hashable {
-var localIdentifier : String
-var assetSize : Double
-var createDate : Date
-init(localIdentifier: String, assetSize: Double, createDate: Date) {
-self.localIdentifier = localIdentifier
-self.assetSize = assetSize
-self.createDate = createDate
-}
-
-func hash(into hasher: inout Hasher) {
-hasher.combine(localIdentifier)
-hasher.combine(assetSize)
-hasher.combine(createDate)
-}
-
-static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool {
-return lhs.localIdentifier == rhs.localIdentifier &&
-lhs.assetSize == rhs.assetSize &&
-lhs.createDate == rhs.createDate
-}
-}
+//class AssetModel :Codable,Hashable {
+// var localIdentifier : String
+// var assetSize : Double
+// var createDate : Date
+// init(localIdentifier: String, assetSize: Double, createDate: Date) {
+// self.localIdentifier = localIdentifier
+// self.assetSize = assetSize
+// self.createDate = createDate
+// }
+//
+// func hash(into hasher: inout Hasher) {
+// hasher.combine(localIdentifier)
+// hasher.combine(assetSize)
+// hasher.combine(createDate)
+// }
+//
+// static func ==(lhs: AssetModel, rhs: AssetModel) -> Bool {
+// return lhs.localIdentifier == rhs.localIdentifier &&
+// lhs.assetSize == rhs.assetSize &&
+// lhs.createDate == rhs.createDate
+// }
+//}
@@ -21,10 +21,10 @@ class TrashViewController: UIViewController {
 override func viewDidLoad() {
 super.viewDidLoad()
 configUI()
 }
 func configUI(){
 view.backgroundColor = .white
 contentScrollView = UIScrollView(frame: CGRect(x: 0, y: 0, width: ScreenW, height: contentH))
@@ -79,7 +79,6 @@ class TrashViewController: UIViewController {
 otherView.frame = CGRect(x: viewWidth, y: 0, width: viewWidth, height: contentH)
 shotView.frame = CGRect(x: viewWidth*2, y: 0, width: viewWidth, height: contentH)
 chatView.frame = CGRect(x: viewWidth*3, y: 0, width: viewWidth, height: contentH)
 }
 lazy var videoView:TrashContenView = {
@@ -107,44 +106,46 @@ class TrashViewController: UIViewController {
 }()
-lazy var collectionView:UICollectionView = {
-let layout = UICollectionViewFlowLayout()
-layout.itemSize = CGSize(width: view.width, height: view.height)
-layout.scrollDirection = .horizontal
-layout.minimumInteritemSpacing = 0
-layout.minimumLineSpacing = 0
-let collectionView = UICollectionView(frame: CGRect(x: 0, y: 0, width: view.width, height: view.height), collectionViewLayout:layout)
-collectionView.isPagingEnabled = true
-collectionView.delegate = self
-collectionView.backgroundColor = .white
-collectionView.dataSource = self
-collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell0")
-collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell1")
-collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell2")
-collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell3")
-return collectionView
-}()
+//
+// lazy var collectionView:UICollectionView = {
+// let layout = UICollectionViewFlowLayout()
+// layout.itemSize = CGSize(width: view.width, height: view.height)
+// layout.scrollDirection = .horizontal
+// layout.minimumInteritemSpacing = 0
+// layout.minimumLineSpacing = 0
+// let collectionView = UICollectionView(frame: CGRect(x: 0, y: 0, width: view.width, height: view.height), collectionViewLayout:layout)
+// collectionView.isPagingEnabled = true
+// collectionView.delegate = self
+// collectionView.backgroundColor = .white
+// collectionView.dataSource = self
+// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell0")
+// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell1")
+// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell2")
+// collectionView.register(UINib(nibName: "TrashContenViewCell", bundle: nil), forCellWithReuseIdentifier: "TrashContenViewCell3")
+//
+// return collectionView
+// }()
 }
-extension TrashViewController:UICollectionViewDelegate,UICollectionViewDataSource{
-
-func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
-return source.count
-}
-
-func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
-let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenViewCell\(indexPath.row)", for: indexPath) as! TrashContenViewCell
-cell.trashType = source[indexPath.row]
-return cell
-}
-
-}
+//extension TrashViewController:UICollectionViewDelegate,UICollectionViewDataSource{
+//
+// func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
+// return source.count
+// }
+//
+// func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
+// let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenViewCell\(indexPath.row)", for: indexPath) as! TrashContenViewCell
+// cell.trashType = source[indexPath.row]
+// return cell
+// }
+//
+//}
 extension TrashViewController:UIScrollViewDelegate{
@@ -170,7 +171,4 @@ extension TrashViewController:UIScrollViewDelegate{
 self.currentPage = currentPage + 1
 }
 }
@@ -21,8 +21,21 @@ class TrashContenAssetCell: UICollectionViewCell {
 assetImage.cornerCut(radius: 8, corner: .allCorners)
 }
+var model:AssetModel?{
+didSet{
+guard let model = model else{
+return
+}
+}
+// assetImage.im
+}
 @IBAction func removeClick(_ sender: Any) {
 }
 }
@@ -22,6 +22,8 @@ class TrashContenView: UIView {
 var scrollLine:UIView!
 let lineW:CGFloat = (ScreenW - 62) / 4.0
+var dataSource:[AssetModel] = []
 override init(frame: CGRect) {
 super.init(frame: frame)
 backgroundColor = .white
@@ -32,6 +34,17 @@ class TrashContenView: UIView {
 fatalError("init(coder:) has not been implemented")
 }
+func getData(){
+// dataSource = TrashDatabase.shared.queryByMediaType(trashType.dbType).compactMap({ (localIdentifier: String, assetSize: Double, createDate: Date, mediaType: Int) in
+// return AssetModel.init(localIdentifier: localIdentifier, assetSize: assetSize, createDate: createDate)
+// })
+collectionView.reloadData()
+}
 func configUI(){
 let layout = UICollectionViewFlowLayout()
 layout.minimumInteritemSpacing = 12
@@ -223,7 +236,7 @@ class TrashContenView: UIView {
 case .chat:
 scrollLine.frame = lineFour.frame
 }
-collectionView.reloadData()
+getData()
 }
 }
 }
@@ -236,24 +249,11 @@ extension TrashContenView:UICollectionViewDelegate,UICollectionViewDataSource,UI
 }
 func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int {
-return 20 //section == 0 ? 1 : 20
+return dataSource.count
 }
 func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell {
-// if indexPath.section == 0{
-// let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenTitleCell", for: indexPath) as! TrashContenTitleCell
-// cell.trashType = trashType
-// return cell
-// }
 let cell = collectionView.dequeueReusableCell(withReuseIdentifier: "TrashContenAssetCell", for: indexPath) as! TrashContenAssetCell
 return cell
 }
-// func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize{
-// return indexPath.section == 0 ? CGSize(width: ScreenW, height: 92) : CGSize(width: (ScreenW-56)/3, height: (ScreenW-56)/3)
-// }
-//
-// func collectionView(_ collectionView: UICollectionView, layout collectionViewLayout: UICollectionViewLayout, insetForSectionAt section: Int) -> UIEdgeInsets{
-// return section == 0 ? UIEdgeInsets() : UIEdgeInsets(top: 0, left: 16, bottom: 0, right: 16)
-// }
 }