// ClassifyPhoto.swift
import CoreLocation
import Photos
import UIKit
import Vision
  3. class ClassifyPhoto {
/// Aggregate size information for one category of photos.
struct PhotoSizeInfo {
    // Combined on-disk size of all assets in the category, in bytes.
    var totalSize: Int64 = 0
    // Number of assets in the category.
    var count: Int = 0
}
/// The complete result of one classification pass over the photo library.
struct ClassifiedPhotos {
    // Assets found in the system Screenshots smart album.
    var screenshots: [PHAsset] = []
    // Assets grouped by place name (city or administrative area).
    var locations: [String: [PHAsset]] = [:]
    // Assets grouped by people bucket (currently a single face-containing bucket).
    var people: [String: [PHAsset]] = [:]
    // Groups of visually similar photos; each inner array is one group.
    var similarPhotos: [[PHAsset]] = []
    // Assets judged blurry by the sharpness heuristic.
    var blurryPhotos: [PHAsset] = []
    // Size summaries for each category above.
    var screenshotsSize: PhotoSizeInfo = PhotoSizeInfo()
    var locationsSize: PhotoSizeInfo = PhotoSizeInfo()
    var peopleSize: PhotoSizeInfo = PhotoSizeInfo()
    var similarPhotosSize: PhotoSizeInfo = PhotoSizeInfo()
    var blurryPhotosSize: PhotoSizeInfo = PhotoSizeInfo()
}
// Cache of resolved place names so repeated reverse-geocode lookups for
// nearby coordinates can be avoided.
private var locationCache: [String: String] = [:]
  23. func classifyPhotos(
  24. assets: PHFetchResult<PHAsset>,
  25. progressHandler: @escaping (String, Float) -> Void,
  26. completion: @escaping (ClassifiedPhotos) -> Void
  27. ) {
  28. // 在后台队列处理
  29. DispatchQueue.global(qos: .userInitiated).async {
  30. var result = ClassifiedPhotos()
  31. let group = DispatchGroup()
  32. // 开始处理
  33. DispatchQueue.main.async {
  34. progressHandler("正在加载照片...", 0.0)
  35. }
  36. // 先处理模糊照片检测(占进度的 30%)
  37. group.enter()
  38. progressHandler("正在检测模糊照片...", 0.0)
  39. self.detectBlurryPhotos(from: assets) { blurryPhotos in
  40. result.blurryPhotos = blurryPhotos
  41. progressHandler("模糊照片检测完成", 0.3)
  42. group.leave()
  43. }
  44. // 1. 检测截图 (占总进度的 20%)
  45. group.enter()
  46. self.fetchScreenshots(from: assets) { screenshots in
  47. result.screenshots = screenshots
  48. DispatchQueue.main.async {
  49. progressHandler("正在检测截图...", 0.3)
  50. }
  51. group.leave()
  52. }
  53. // 2. 检测相似照片 (占总进度的 80%)
  54. group.enter()
  55. self.detectSimilarPhotos(
  56. assets: assets,
  57. progressHandler: { stage, progress in
  58. // 将相似照片检测的进度映射到 20%-100% 的范围
  59. let mappedProgress = 0.3 + (progress * 0.6)
  60. DispatchQueue.main.async {
  61. progressHandler(stage, mappedProgress)
  62. }
  63. }
  64. ) { similarPhotos in
  65. result.similarPhotos = similarPhotos
  66. group.leave()
  67. }
  68. // 3. 按地点分类 (占总进度的 20%)
  69. // group.enter()
  70. // self.classifyByLocation(assets: assets) { locationGroups in
  71. // result.locations = locationGroups
  72. // DispatchQueue.main.async {
  73. // progressHandler("正在按地点分类...", 0.8)
  74. // }
  75. // group.leave()
  76. // }
  77. // 4. 按人物分类 (占总进度的 20%)
  78. group.enter()
  79. self.classifyByPeople(assets: assets) { peopleGroups in
  80. result.people = peopleGroups
  81. DispatchQueue.main.async {
  82. progressHandler("正在按人物分类...", 1.0)
  83. }
  84. group.leave()
  85. }
  86. // // 添加模糊照片检测
  87. // group.enter()
  88. // self.detectBlurryPhotos(from: assets) { blurryPhotos in
  89. // result.blurryPhotos = blurryPhotos
  90. // DispatchQueue.main.async {
  91. // progressHandler("正在检测模糊照片...", 1.0)
  92. // }
  93. // group.leave()
  94. // }
  95. // 在所有分类完成后计算大小
  96. group.notify(queue: .main) {
  97. let sizeGroup = DispatchGroup()
  98. // 计算模糊照片大小
  99. sizeGroup.enter()
  100. self.calculateAssetsSize(result.blurryPhotos) { sizeInfo in
  101. result.blurryPhotosSize = sizeInfo
  102. sizeGroup.leave()
  103. }
  104. // 计算相似照片大小
  105. sizeGroup.enter()
  106. let similarAssets = Array(result.similarPhotos.flatMap { $0 })
  107. self.calculateAssetsSize(similarAssets) { sizeInfo in
  108. result.similarPhotosSize = sizeInfo
  109. sizeGroup.leave()
  110. }
  111. // 计算截图大小
  112. sizeGroup.enter()
  113. self.calculateAssetsSize(result.screenshots) { sizeInfo in
  114. result.screenshotsSize = sizeInfo
  115. sizeGroup.leave()
  116. }
  117. // // 计算地点照片大小
  118. // sizeGroup.enter()
  119. // let locationAssets = Array(result.locations.values.flatMap { $0 })
  120. // self.calculateAssetsSize(locationAssets) { sizeInfo in
  121. // result.locationsSize = sizeInfo
  122. // sizeGroup.leave()
  123. // }
  124. // 计算人物照片大小
  125. sizeGroup.enter()
  126. let peopleAssets = Array(result.people.values.flatMap { $0 })
  127. self.calculateAssetsSize(peopleAssets) { sizeInfo in
  128. result.peopleSize = sizeInfo
  129. sizeGroup.leave()
  130. }
  131. // 所有大小计算完成后回调
  132. sizeGroup.notify(queue: .main) {
  133. progressHandler("分类完成", 1.0)
  134. completion(result)
  135. }
  136. }
  137. }
  138. }
  139. // 添加内存清理辅助方法
  140. private func cleanupMemory() {
  141. // 清理图像缓存
  142. URLCache.shared.removeAllCachedResponses()
  143. // 强制进行一次垃圾回收
  144. autoreleasepool {
  145. let _ = [String](repeating: "temp", count: 1)
  146. }
  147. #if os(iOS)
  148. // 发送低内存警告
  149. UIApplication.shared.perform(Selector(("_performMemoryWarning")))
  150. #endif
  151. }
/// Detects groups of visually similar photos by extracting a Vision feature
/// print for every asset and clustering pairs whose feature distance maps to
/// a similarity of at least `similarityThreshold`.
///
/// Runs in two phases: feature extraction (progress 0.0-0.6) and pairwise
/// comparison (progress 0.6-1.0).
/// - Parameters:
///   - assets: The photos to compare.
///   - progressHandler: Receives a stage description and a 0...1 progress
///     value. NOTE(review): during the extraction phase it is invoked from a
///     background queue, not the main queue — callers must not assume
///     main-thread delivery.
///   - completion: Invoked on the main queue with groups of similar assets,
///     sorted by group size (largest first). NOTE(review): when the
///     `#available(iOS 13.0, *)` check fails there is no else branch, so
///     neither completion nor progressHandler is ever invoked.
func detectSimilarPhotos(
    assets: PHFetchResult<PHAsset>,
    progressHandler: @escaping (String, Float) -> Void,
    completion: @escaping ([[PHAsset]]) -> Void
) {
    // NOTE(review): this outer array is never used — it is shadowed by a
    // local of the same name inside the notify block below.
    var similarGroups: [[PHAsset]] = []
    let group = DispatchGroup()
    if #available(iOS 13.0, *) {
        var imageFeatures: [(asset: PHAsset, feature: VNFeaturePrintObservation)] = []
        // Serial queue: serializes appends to imageFeatures and the progress
        // counter during extraction.
        let processingQueue = DispatchQueue(label: "com.app.similarPhotos", qos: .userInitiated)
        let semaphore = DispatchSemaphore(value: 4) // cap concurrent image requests
        // Phase 1: extract a feature print for every image.
        let totalAssets = assets.count
        var processedAssets = 0
        progressHandler("正在加载照片...", 0.0)
        for i in 0..<assets.count {
            let asset = assets[i]
            group.enter()
            semaphore.wait()
            let options = PHImageRequestOptions()
            options.deliveryMode = .fastFormat // fast, single callback per request
            options.isSynchronous = false
            options.resizeMode = .fast
            DispatchQueue.global(qos: .background).async {
                PHImageManager.default().requestImage(
                    for: asset,
                    targetSize: CGSize(width: 128, height: 128), // low res is enough for feature prints
                    contentMode: .aspectFit,
                    options: options
                ) { image, _ in
                    defer {
                        semaphore.signal()
                    }
                    guard let image = image,
                        let cgImage = image.cgImage else {
                        group.leave()
                        return
                    }
                    processingQueue.async {
                        do {
                            let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
                            let request = VNGenerateImageFeaturePrintRequest()
                            try requestHandler.perform([request])
                            if let result = request.results?.first as? VNFeaturePrintObservation {
                                imageFeatures.append((asset, result))
                                // Report extraction progress, scaled into 0-0.6.
                                processedAssets += 1
                                let progress = Float(processedAssets) / Float(totalAssets)
                                progressHandler("正在提取特征...", progress * 0.6)
                            }
                        } catch {
                            print("特征提取失败: \(error)")
                        }
                        group.leave()
                    }
                }
            }
        }
        // Phase 2: pairwise comparison once all features are extracted.
        group.notify(queue: processingQueue) {
            progressHandler("正在比较相似度...", 0.6)
            // Minimum similarity (1 - feature distance) for two photos to be grouped.
            let similarityThreshold: Float = 0.7
            var similarGroups: [[PHAsset]] = []
            // Parallelize the comparison across a concurrent queue.
            let processingGroup = DispatchGroup()
            let processingQueue = DispatchQueue(label: "com.yourapp.similarity.processing", attributes: .concurrent)
            let resultsQueue = DispatchQueue(label: "com.yourapp.similarity.results")
            let semaphore = DispatchSemaphore(value: 4) // cap concurrent comparison tasks
            // Thread-safe containers for cross-task state.
            // NOTE(review): the contains/insert sequences below are
            // check-then-act and not atomic as a whole, so an asset can
            // occasionally be considered by two tasks at once.
            var processedIndices = Atomic<Set<Int>>(Set<Int>())
            var groupResults = Atomic<[Int: [PHAsset]]>([:])
            // Work in batches over the feature list.
            let batchSize = min(50, imageFeatures.count)
            // Guard against a non-finite batch count (e.g. batchSize == 0).
            let batchCount = Float(imageFeatures.count) / Float(batchSize)
            let batches = batchCount.isFinite ? Int(ceil(batchCount)) : 1
            for batchIndex in 0..<batches {
                let startIndex = batchIndex * batchSize
                let endIndex = min(startIndex + batchSize, imageFeatures.count)
                for i in startIndex..<endIndex {
                    // Skip indices already claimed by an earlier group.
                    if processedIndices.value.contains(i) { continue }
                    semaphore.wait()
                    processingGroup.enter()
                    processingQueue.async {
                        // Re-check: another task may have claimed i while we waited.
                        if processedIndices.value.contains(i) {
                            semaphore.signal()
                            processingGroup.leave()
                            return
                        }
                        var similarAssets: [PHAsset] = [imageFeatures[i].asset]
                        processedIndices.mutate { $0.insert(i) }
                        for j in (i + 1)..<imageFeatures.count {
                            // Skip candidates already grouped elsewhere.
                            if processedIndices.value.contains(j) { continue }
                            do {
                                var distance: Float = 0
                                try imageFeatures[i].feature.computeDistance(&distance, to: imageFeatures[j].feature)
                                // Reject NaN / infinite distances outright.
                                if distance.isNaN || distance.isInfinite {
                                    print("警告: 检测到无效的距离值")
                                    continue
                                }
                                // Clamp into [0, 1] before converting to similarity.
                                distance = max(0, min(1, distance))
                                let similarity = 1 - distance
                                if similarity >= similarityThreshold {
                                    similarAssets.append(imageFeatures[j].asset)
                                    processedIndices.mutate { $0.insert(j) }
                                }
                            } catch {
                                print("相似度计算失败: \(error)")
                            }
                        }
                        // Only keep groups with at least two members.
                        if similarAssets.count > 1 {
                            resultsQueue.async {
                                groupResults.mutate { $0[i] = similarAssets }
                            }
                        }
                        // Progress update, with guards against invalid values.
                        if imageFeatures.count > 0 {
                            let processedCount = Float(processedIndices.value.count)
                            let totalCount = Float(imageFeatures.count)
                            var progress: Float = 0
                            if processedCount.isFinite && totalCount.isFinite && totalCount > 0 {
                                progress = processedCount / totalCount
                                progress = max(0, min(1, progress))
                            }
                            DispatchQueue.main.async {
                                progressHandler("正在比较相似度...", 0.6 + progress * 0.4)
                            }
                        }
                        semaphore.signal()
                        processingGroup.leave()
                    }
                }
            }
            processingGroup.wait()
            // Collect and sort the groups, biggest first.
            // NOTE(review): writes dispatched to resultsQueue above may still
            // be pending when wait() returns, so a just-finished group could
            // be missed here — confirm against observed behavior.
            similarGroups = Array(groupResults.value.values)
            similarGroups.sort { $0.count > $1.count }
            DispatchQueue.main.async {
                completion(similarGroups)
            }
        }
    }
}
  305. func classifyByLocation(assets: PHFetchResult<PHAsset>,
  306. completion: @escaping ([String: [PHAsset]]) -> Void) {
  307. var locationGroups: [String: [PHAsset]] = [:]
  308. let group = DispatchGroup()
  309. let geocodeQueue = DispatchQueue(label: "com.app.geocoding")
  310. let semaphore = DispatchSemaphore(value: 10) // 限制并发请求数
  311. assets.enumerateObjects { asset, _, _ in
  312. if let location = asset.location {
  313. group.enter()
  314. semaphore.wait()
  315. geocodeQueue.async {
  316. let geocoder = CLGeocoder()
  317. geocoder.reverseGeocodeLocation(location) { placemarks, error in
  318. defer {
  319. semaphore.signal()
  320. group.leave()
  321. }
  322. if let placemark = placemarks?.first {
  323. let locationName = self.formatLocationName(placemark)
  324. DispatchQueue.main.async {
  325. if locationGroups[locationName] == nil {
  326. locationGroups[locationName] = []
  327. }
  328. locationGroups[locationName]?.append(asset)
  329. }
  330. }
  331. }
  332. }
  333. }
  334. }
  335. // 等待所有地理编码完成后回调
  336. group.notify(queue: .main) {
  337. completion(locationGroups)
  338. }
  339. }
  340. // 格式化地点名称(只返回城市名)
  341. func formatLocationName(_ placemark: CLPlacemark) -> String {
  342. if let city = placemark.locality {
  343. return city
  344. } else if let area = placemark.administrativeArea {
  345. return area
  346. }
  347. return "其他"
  348. }
  349. func classifyByPeople(assets: PHFetchResult<PHAsset>,
  350. completion: @escaping ([String: [PHAsset]]) -> Void) {
  351. var peopleGroups: [String: [PHAsset]] = [:]
  352. let group = DispatchGroup()
  353. // 创建专用队列和信号量控制并发
  354. let processingQueue = DispatchQueue(label: "com.app.peopleDetection", attributes: .concurrent)
  355. let resultQueue = DispatchQueue(label: "com.app.peopleResult")
  356. let semaphore = DispatchSemaphore(value: 4) // 限制并发数
  357. // 创建进度追踪
  358. var processedCount = 0
  359. let totalCount = assets.count
  360. // 分批处理,每批处理一部分数据
  361. let batchSize = 50
  362. let batches = Int(ceil(Float(assets.count) / Float(batchSize)))
  363. for batchIndex in 0..<batches {
  364. let startIndex = batchIndex * batchSize
  365. let endIndex = min(startIndex + batchSize, assets.count)
  366. // 使用自动释放池减少内存占用
  367. autoreleasepool {
  368. for i in startIndex..<endIndex {
  369. let asset = assets[i]
  370. group.enter()
  371. semaphore.wait()
  372. // 降低处理图片的分辨率
  373. let options = PHImageRequestOptions()
  374. options.deliveryMode = .fastFormat
  375. options.isSynchronous = false
  376. options.resizeMode = .fast
  377. processingQueue.async {
  378. // 使用自动释放池减少内存占用
  379. autoreleasepool {
  380. let result = PHImageManager.default().requestImage(
  381. for: asset,
  382. targetSize: CGSize(width: 128, height: 128), // 降低分辨率
  383. contentMode: .aspectFit,
  384. options: options
  385. ) { image, _ in
  386. defer {
  387. semaphore.signal()
  388. }
  389. guard let image = image else {
  390. group.leave()
  391. return
  392. }
  393. // 使用 Vision 框架检测人脸
  394. guard let ciImage = CIImage(image: image) else {
  395. group.leave()
  396. return
  397. }
  398. let request = VNDetectFaceRectanglesRequest()
  399. let handler = VNImageRequestHandler(ciImage: ciImage, options: [:])
  400. do {
  401. try handler.perform([request])
  402. if let results = request.results, !results.isEmpty {
  403. // 检测到人脸,添加到数组
  404. resultQueue.async {
  405. if peopleGroups["包含人脸的照片"] == nil {
  406. peopleGroups["包含人脸的照片"] = []
  407. }
  408. peopleGroups["包含人脸的照片"]?.append(asset)
  409. }
  410. }
  411. } catch {
  412. print("人脸检测失败: \(error)")
  413. }
  414. // 更新进度
  415. resultQueue.async {
  416. processedCount += 1
  417. let progress = Float(processedCount) / Float(totalCount)
  418. DispatchQueue.main.async {
  419. print("人脸检测进度: \(Int(progress * 100))%")
  420. }
  421. }
  422. group.leave()
  423. }
  424. }
  425. }
  426. }
  427. }
  428. // 每批处理完后清理内存
  429. cleanupMemory()
  430. }
  431. // 等待所有检测完成后更新结果
  432. group.notify(queue: .main) {
  433. completion(peopleGroups)
  434. }
  435. }
  436. // 按人物分类
  437. // func classifyByPeople(assets: PHFetchResult<PHAsset>,
  438. // completion: @escaping ([String: [PHAsset]]) -> Void) {
  439. // var peopleGroups: [String: [PHAsset]] = [:]
  440. // let group = DispatchGroup()
  441. //
  442. // DispatchQueue.global(qos: .background).async {
  443. // // 创建一个数组来存储检测到人脸的照片
  444. // var facesArray: [PHAsset] = []
  445. //
  446. // // 遍历所有照片
  447. // assets.enumerateObjects { asset, _, _ in
  448. // group.enter()
  449. //
  450. // // 获取照片的缩略图进行人脸检测
  451. // let options = PHImageRequestOptions()
  452. // options.isSynchronous = false
  453. // options.deliveryMode = .fastFormat
  454. //
  455. // PHImageManager.default().requestImage(
  456. // for: asset,
  457. // targetSize: CGSize(width: 128, height: 128), // 使用较小的尺寸提高性能
  458. // contentMode: .aspectFit,
  459. // options: options
  460. // ) { image, _ in
  461. // guard let image = image else {
  462. // group.leave()
  463. // return
  464. // }
  465. //
  466. // // 使用 Vision 框架检测人脸
  467. // guard let ciImage = CIImage(image: image) else {
  468. // group.leave()
  469. // return
  470. // }
  471. //
  472. // let request = VNDetectFaceRectanglesRequest()
  473. // let handler = VNImageRequestHandler(ciImage: ciImage)
  474. //
  475. // do {
  476. // try handler.perform([request])
  477. // if let results = request.results, !results.isEmpty {
  478. // // 检测到人脸,添加到数组
  479. // DispatchQueue.main.async {
  480. // facesArray.append(asset)
  481. // }
  482. // }
  483. // } catch {
  484. // print("人脸检测失败: \(error)")
  485. // }
  486. //
  487. // group.leave()
  488. // }
  489. // }
  490. //
  491. // // 等待所有检测完成后更新结果
  492. // group.notify(queue: .main) {
  493. // if !facesArray.isEmpty {
  494. // peopleGroups["包含人脸的照片"] = facesArray
  495. // }
  496. // completion(peopleGroups)
  497. // }
  498. // }
  499. // }
  500. // 识别截图
  501. func fetchScreenshots(from assets: PHFetchResult<PHAsset>,
  502. completion: @escaping ([PHAsset]) -> Void) {
  503. var screenshots: [PHAsset] = []
  504. // 获取系统的截图智能相册
  505. let screenshotAlbums = PHAssetCollection.fetchAssetCollections(
  506. with: .smartAlbum,
  507. subtype: .smartAlbumScreenshots,
  508. options: nil
  509. )
  510. // 从截图相册中获取所有截图
  511. screenshotAlbums.enumerateObjects { collection, _, _ in
  512. let fetchOptions = PHFetchOptions()
  513. let screenshotAssets = PHAsset.fetchAssets(in: collection, options: fetchOptions)
  514. screenshotAssets.enumerateObjects { asset, _, _ in
  515. screenshots.append(asset)
  516. }
  517. }
  518. completion(screenshots)
  519. }
  520. // 修改辅助方法以接受 PHFetchResult<PHAsset>
  521. // private func detectScreenshots(assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  522. // let processingQueue = DispatchQueue(label: "com.yourapp.screenshots.processing", attributes: .concurrent)
  523. // let resultQueue = DispatchQueue(label: "com.yourapp.screenshots.results")
  524. // let group = DispatchGroup()
  525. // let semaphore = DispatchSemaphore(value: 4) // 限制并发数
  526. //
  527. // let screenshots = Atomic<[PHAsset]>([])
  528. //
  529. // // 分批处理
  530. // let totalCount = assets.count
  531. // let batchSize = 50
  532. // let batches = Int(ceil(Float(totalCount) / Float(batchSize)))
  533. //
  534. // for batchIndex in 0..<batches {
  535. // let startIndex = batchIndex * batchSize
  536. // let endIndex = min(startIndex + batchSize, totalCount)
  537. //
  538. // processingQueue.async {
  539. // autoreleasepool {
  540. // for i in startIndex..<endIndex {
  541. // semaphore.wait()
  542. // group.enter()
  543. //
  544. // let asset = assets.object(at: i)
  545. //
  546. // // 检测是否为截图的逻辑
  547. // // ...
  548. //
  549. // // 模拟检测逻辑
  550. // let isScreenshot = asset.pixelWidth == asset.pixelHeight * 16 / 9 ||
  551. // asset.pixelHeight == asset.pixelWidth * 16 / 9
  552. //
  553. // if isScreenshot {
  554. // resultQueue.async {
  555. // screenshots.mutate { $0.append(asset) }
  556. // }
  557. // }
  558. //
  559. // semaphore.signal()
  560. // group.leave()
  561. // }
  562. // }
  563. // }
  564. // }
  565. //
  566. // group.notify(queue: .main) {
  567. // completion(screenshots.value)
  568. // }
  569. // }
  570. // ... existing code ...
  571. func detectBlurryPhotos(from assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  572. var blurryPhotos: [PHAsset] = []
  573. let group = DispatchGroup()
  574. let processingQueue = DispatchQueue(label: "com.app.blurryDetection", attributes: .concurrent)
  575. let resultQueue = DispatchQueue(label: "com.app.blurryResult")
  576. let semaphore = DispatchSemaphore(value: 8) // 增加并发数
  577. // 创建进度追踪
  578. var processedCount = 0
  579. let totalCount = assets.count
  580. // 分批处理,每批处理一部分数据
  581. let batchSize = 50
  582. let batches = Int(ceil(Float(assets.count) / Float(batchSize)))
  583. for batchIndex in 0..<batches {
  584. let startIndex = batchIndex * batchSize
  585. let endIndex = min(startIndex + batchSize, assets.count)
  586. autoreleasepool {
  587. for i in startIndex..<endIndex {
  588. let asset = assets[i]
  589. group.enter()
  590. semaphore.wait()
  591. let options = PHImageRequestOptions()
  592. options.deliveryMode = .fastFormat // 使用快速模式
  593. options.isSynchronous = false
  594. options.resizeMode = .fast
  595. // 进一步降低处理图片的分辨率
  596. PHImageManager.default().requestImage(
  597. for: asset,
  598. targetSize: CGSize(width: 64, height: 64), // 降低分辨率到64x64
  599. contentMode: .aspectFit,
  600. options: options
  601. ) { image, _ in
  602. defer {
  603. semaphore.signal()
  604. }
  605. guard let image = image else {
  606. group.leave()
  607. return
  608. }
  609. processingQueue.async {
  610. // 使用更高效的模糊检测
  611. let isBlurry = self.fastBlurCheck(image)
  612. if isBlurry {
  613. resultQueue.async {
  614. blurryPhotos.append(asset)
  615. }
  616. }
  617. // 更新进度
  618. resultQueue.async {
  619. processedCount += 1
  620. let progress = Float(processedCount) / Float(totalCount)
  621. if processedCount % 100 == 0 || processedCount == totalCount {
  622. DispatchQueue.main.async {
  623. print("模糊检测进度: \(Int(progress * 100))%")
  624. }
  625. }
  626. }
  627. group.leave()
  628. }
  629. }
  630. }
  631. }
  632. // 每批处理完后清理内存
  633. cleanupMemory()
  634. }
  635. group.notify(queue: .main) {
  636. completion(blurryPhotos)
  637. }
  638. }
  639. // 更高效的模糊检测方法
  640. private func fastBlurCheck(_ image: UIImage) -> Bool {
  641. guard let cgImage = image.cgImage else { return false }
  642. // 使用更小的采样区域
  643. let width = cgImage.width
  644. let height = cgImage.height
  645. let stride = 4 // 增加步长,减少处理像素数
  646. // 提前检查图像尺寸是否合法
  647. guard width > (2 * stride), height > (2 * stride) else {
  648. return false
  649. }
  650. // 使用vImage进行快速处理
  651. var buffer = [UInt8](repeating: 0, count: width * height)
  652. let colorSpace = CGColorSpaceCreateDeviceGray()
  653. guard let context = CGContext(
  654. data: &buffer,
  655. width: width,
  656. height: height,
  657. bitsPerComponent: 8,
  658. bytesPerRow: width,
  659. space: colorSpace,
  660. bitmapInfo: CGImageAlphaInfo.none.rawValue
  661. ) else {
  662. return false
  663. }
  664. context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
  665. // 使用拉普拉斯算子的简化版本
  666. var score: Double = 0
  667. var sampledPixels = 0
  668. // 只采样图像的一部分区域
  669. let sampleRows = min(10, height / stride)
  670. let sampleCols = min(10, width / stride)
  671. for y in stride(from: stride, to: height - stride, by: stride * sampleRows / 10) {
  672. for x in stride(from: stride, to: width - stride, by: stride * sampleCols / 10) {
  673. let current = Int(buffer[y * width + x])
  674. let left = Int(buffer[y * width + (x - stride)])
  675. let right = Int(buffer[y * width + (x + stride)])
  676. let top = Int(buffer[(y - stride) * width + x])
  677. let bottom = Int(buffer[(y + stride) * width + x])
  678. // 简化的边缘检测
  679. let dx = abs(left - right)
  680. let dy = abs(top - bottom)
  681. score += Double(max(dx, dy))
  682. sampledPixels += 1
  683. }
  684. }
  685. // 避免除以零
  686. guard sampledPixels > 0 else { return false }
  687. // 归一化分数
  688. let normalizedScore = score / Double(sampledPixels)
  689. // 调整阈值
  690. let threshold = 15.0
  691. return normalizedScore < threshold
  692. }
  693. // func detectBlurryPhotos(from assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  694. // var blurryPhotos: [PHAsset] = []
  695. // let group = DispatchGroup()
  696. // let processingQueue = DispatchQueue(label: "com.app.blurryDetection", attributes: .concurrent)
  697. // let resultQueue = DispatchQueue(label: "com.app.blurryResult")
  698. // let semaphore = DispatchSemaphore(value: 5) // 增加并发数
  699. //
  700. // // 创建进度追踪
  701. // var processedCount = 0
  702. // let totalCount = assets.count
  703. //
  704. // for i in 0..<assets.count {
  705. // let asset = assets[i]
  706. // group.enter()
  707. // semaphore.wait()
  708. //
  709. // let options = PHImageRequestOptions()
  710. // options.deliveryMode = .fastFormat // 使用快速模式
  711. // options.isSynchronous = false
  712. // options.resizeMode = .fast
  713. //
  714. // // 降低处理图片的分辨率
  715. // PHImageManager.default().requestImage(
  716. // for: asset,
  717. // targetSize: CGSize(width: 128, height: 128), // 降低分辨率
  718. // contentMode: .aspectFit,
  719. // options: options
  720. // ) { image, _ in
  721. // defer {
  722. // semaphore.signal()
  723. // }
  724. //
  725. // guard let image = image,
  726. // let cgImage = image.cgImage else {
  727. // group.leave()
  728. // return
  729. // }
  730. //
  731. // processingQueue.async {
  732. // // 快速模糊检测
  733. // let isBlurry = self.quickBlurCheck(cgImage)
  734. //
  735. // if isBlurry {
  736. // resultQueue.async {
  737. // blurryPhotos.append(asset)
  738. // }
  739. // }
  740. //
  741. // // 更新进度
  742. // resultQueue.async {
  743. // processedCount += 1
  744. // let progress = Float(processedCount) / Float(totalCount)
  745. // DispatchQueue.main.async {
  746. // print("模糊检测进度: \(Int(progress * 100))%")
  747. // }
  748. // }
  749. //
  750. // group.leave()
  751. // }
  752. // }
  753. // }
  754. //
  755. // group.notify(queue: .main) {
  756. // completion(blurryPhotos)
  757. // }
  758. // }
  759. //
  760. // // 快速模糊检测方法
  761. // private func quickBlurCheck(_ image: CGImage) -> Bool {
  762. //
  763. // let width = image.width
  764. // let height = image.height
  765. // let stride = 2 // 跳过一些像素以加快速度
  766. //
  767. // // 提前检查图像尺寸是否合法
  768. // guard width > (2 * stride), height > (2 * stride) else {
  769. // return false // 小尺寸图像直接判定为模糊或清晰
  770. // }
  771. //
  772. // var buffer = [UInt8](repeating: 0, count: width * height)
  773. //
  774. // let colorSpace = CGColorSpaceCreateDeviceGray()
  775. // guard let context = CGContext(
  776. // data: &buffer,
  777. // width: width,
  778. // height: height,
  779. // bitsPerComponent: 8,
  780. // bytesPerRow: width,
  781. // space: colorSpace,
  782. // bitmapInfo: CGImageAlphaInfo.none.rawValue
  783. // ) else {
  784. // return false
  785. // }
  786. //
  787. // context.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
  788. //
  789. // // 使用简化的拉普拉斯算子
  790. // var score: Double = 0
  791. //
  792. // for y in stride..<(height-stride) where y % stride == 0 {
  793. // for x in stride..<(width-stride) where x % stride == 0 {
  794. // let current = Int(buffer[y * width + x])
  795. // let left = Int(buffer[y * width + (x - stride)])
  796. // let right = Int(buffer[y * width + (x + stride)])
  797. // let top = Int(buffer[(y - stride) * width + x])
  798. // let bottom = Int(buffer[(y + stride) * width + x])
  799. //
  800. // // 简化的边缘检测
  801. // let dx = abs(left - right)
  802. // let dy = abs(top - bottom)
  803. // score += Double(max(dx, dy))
  804. // }
  805. // }
  806. //
  807. // // 归一化分数
  808. // let normalizedScore = score / Double((width * height) / (stride * stride))
  809. //
  810. // // 调整阈值(可能需要根据实际效果调整)
  811. // let threshold = 20.0
  812. // return normalizedScore < threshold
  813. // }
  814. }
  815. extension ClassifyPhoto {
  816. // 获取资源大小的辅助方法
  817. func getAssetSize(_ asset: PHAsset, completion: @escaping (Int64) -> Void) {
  818. DispatchQueue.global(qos: .background).async {
  819. let resources = PHAssetResource.assetResources(for: asset)
  820. if let resource = resources.first {
  821. var size: Int64 = 0
  822. if let unsignedInt64 = resource.value(forKey: "fileSize") as? CLong {
  823. size = Int64(unsignedInt64)
  824. }
  825. DispatchQueue.main.async {
  826. completion(size)
  827. }
  828. } else {
  829. DispatchQueue.main.async {
  830. completion(0)
  831. }
  832. }
  833. }
  834. }
  835. // 计算资产组的总大小
  836. func calculateAssetsSize(_ assets: [PHAsset], completion: @escaping (PhotoSizeInfo) -> Void) {
  837. print("正在计算图片组容量大小")
  838. let group = DispatchGroup()
  839. var totalSize: Int64 = 0
  840. for asset in assets {
  841. group.enter()
  842. getAssetSize(asset) { size in
  843. totalSize += size
  844. group.leave()
  845. }
  846. }
  847. group.notify(queue: .main) {
  848. completion(PhotoSizeInfo(totalSize: totalSize, count: assets.count))
  849. }
  850. }
  851. }
  852. extension ClassifyPhoto {
  853. // 添加一个处理 P3 色彩空间图像的辅助方法
  854. private func processImageWithSafeColorSpace(_ image: UIImage) -> UIImage? {
  855. autoreleasepool {
  856. guard let cgImage = image.cgImage else { return image }
  857. // 检查色彩空间
  858. if let colorSpace = cgImage.colorSpace,
  859. (colorSpace.name as String?) == CGColorSpace.displayP3 as String {
  860. // 转换为 sRGB 色彩空间
  861. let sRGBColorSpace = CGColorSpaceCreateDeviceRGB()
  862. if let context = CGContext(
  863. data: nil,
  864. width: cgImage.width,
  865. height: cgImage.height,
  866. bitsPerComponent: 8,
  867. bytesPerRow: 0,
  868. space: sRGBColorSpace,
  869. bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
  870. ) {
  871. context.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
  872. if let convertedImage = context.makeImage() {
  873. return UIImage(cgImage: convertedImage, scale: image.scale, orientation: image.imageOrientation)
  874. }
  875. }
  876. }
  877. return image
  878. }
  879. }
  880. // 修改图像请求方法,添加色彩空间处理
  881. private func requestImageWithSafeProcessing(
  882. for asset: PHAsset,
  883. targetSize: CGSize,
  884. contentMode: PHImageContentMode,
  885. options: PHImageRequestOptions?,
  886. completion: @escaping (UIImage?) -> Void
  887. ) {
  888. PHImageManager.default().requestImage(
  889. for: asset,
  890. targetSize: targetSize,
  891. contentMode: contentMode,
  892. options: options
  893. ) { image, info in
  894. guard let image = image else {
  895. completion(nil)
  896. return
  897. }
  898. // 处理可能的 P3 色彩空间图像
  899. DispatchQueue.global(qos: .userInitiated).async {
  900. let processedImage = self.processImageWithSafeColorSpace(image)
  901. DispatchQueue.main.async {
  902. completion(processedImage)
  903. }
  904. }
  905. }
  906. }
  907. }
  908. class Atomic<T> {
  909. private var value_: T
  910. private let lock = NSLock()
  911. init(_ value: T) {
  912. self.value_ = value
  913. }
  914. var value: T {
  915. lock.lock()
  916. defer { lock.unlock() }
  917. return value_
  918. }
  919. func mutate(_ mutation: (inout T) -> Void) {
  920. lock.lock()
  921. defer { lock.unlock() }
  922. mutation(&value_)
  923. }
  924. }