// ClassifyPhoto.swift
import CoreLocation
import Photos
import UIKit
import Vision
  3. class ClassifyPhoto {
  4. struct PhotoSizeInfo {
  5. var totalSize: Int64 = 0
  6. var count: Int = 0
  7. }
  8. struct ClassifiedPhotos {
  9. var screenshots: [PHAsset] = []
  10. var locations: [String: [PHAsset]] = [:] // 按地点分组
  11. var people: [String: [PHAsset]] = [:] // 按人物分组
  12. var similarPhotos: [[PHAsset]] = [] // 存储相似照片组
  13. var blurryPhotos: [PHAsset] = [] // 添加模糊照片数组
  14. // 添加容量信息
  15. var screenshotsSize: PhotoSizeInfo = PhotoSizeInfo()
  16. var locationsSize: PhotoSizeInfo = PhotoSizeInfo()
  17. var peopleSize: PhotoSizeInfo = PhotoSizeInfo()
  18. var similarPhotosSize: PhotoSizeInfo = PhotoSizeInfo()
  19. var blurryPhotosSize: PhotoSizeInfo = PhotoSizeInfo() // 添加模糊照片容量信息
  20. }
  21. // 添加位置缓存
  22. private var locationCache: [String: String] = [:]
  23. func classifyPhotos(
  24. assets: PHFetchResult<PHAsset>,
  25. progressHandler: @escaping (String, Float) -> Void,
  26. completion: @escaping (ClassifiedPhotos) -> Void
  27. ) {
  28. // 在后台队列处理
  29. DispatchQueue.global(qos: .userInitiated).async {
  30. var result = ClassifiedPhotos()
  31. let group = DispatchGroup()
  32. // 开始处理
  33. DispatchQueue.main.async {
  34. progressHandler("正在加载照片...", 0.0)
  35. }
  36. // 先处理模糊照片检测(占进度的 30%)
  37. group.enter()
  38. progressHandler("正在检测模糊照片...", 0.0)
  39. self.detectBlurryPhotos(from: assets) { blurryPhotos in
  40. result.blurryPhotos = blurryPhotos
  41. progressHandler("模糊照片检测完成", 0.3)
  42. group.leave()
  43. }
  44. // 1. 检测截图 (占总进度的 20%)
  45. group.enter()
  46. self.fetchScreenshots(from: assets) { screenshots in
  47. result.screenshots = screenshots
  48. DispatchQueue.main.async {
  49. progressHandler("正在检测截图...", 0.3)
  50. }
  51. group.leave()
  52. }
  53. // 2. 检测相似照片 (占总进度的 80%)
  54. group.enter()
  55. self.detectSimilarPhotos(
  56. assets: assets,
  57. progressHandler: { stage, progress in
  58. // 将相似照片检测的进度映射到 20%-100% 的范围
  59. let mappedProgress = 0.3 + (progress * 0.6)
  60. DispatchQueue.main.async {
  61. progressHandler(stage, mappedProgress)
  62. }
  63. }
  64. ) { similarPhotos in
  65. result.similarPhotos = similarPhotos
  66. group.leave()
  67. }
  68. // 3. 按地点分类 (占总进度的 20%)
  69. // group.enter()
  70. // self.classifyByLocation(assets: assets) { locationGroups in
  71. // result.locations = locationGroups
  72. // DispatchQueue.main.async {
  73. // progressHandler("正在按地点分类...", 0.8)
  74. // }
  75. // group.leave()
  76. // }
  77. // 4. 按人物分类 (占总进度的 20%)
  78. group.enter()
  79. self.classifyByPeople(assets: assets) { peopleGroups in
  80. result.people = peopleGroups
  81. DispatchQueue.main.async {
  82. progressHandler("正在按人物分类...", 1.0)
  83. }
  84. group.leave()
  85. }
  86. // // 添加模糊照片检测
  87. // group.enter()
  88. // self.detectBlurryPhotos(from: assets) { blurryPhotos in
  89. // result.blurryPhotos = blurryPhotos
  90. // DispatchQueue.main.async {
  91. // progressHandler("正在检测模糊照片...", 1.0)
  92. // }
  93. // group.leave()
  94. // }
  95. // 在所有分类完成后计算大小
  96. group.notify(queue: .main) {
  97. let sizeGroup = DispatchGroup()
  98. // 计算模糊照片大小
  99. sizeGroup.enter()
  100. self.calculateAssetsSize(result.blurryPhotos) { sizeInfo in
  101. result.blurryPhotosSize = sizeInfo
  102. sizeGroup.leave()
  103. }
  104. // 计算相似照片大小
  105. sizeGroup.enter()
  106. let similarAssets = Array(result.similarPhotos.flatMap { $0 })
  107. self.calculateAssetsSize(similarAssets) { sizeInfo in
  108. result.similarPhotosSize = sizeInfo
  109. sizeGroup.leave()
  110. }
  111. // 计算截图大小
  112. sizeGroup.enter()
  113. self.calculateAssetsSize(result.screenshots) { sizeInfo in
  114. result.screenshotsSize = sizeInfo
  115. sizeGroup.leave()
  116. }
  117. // // 计算地点照片大小
  118. // sizeGroup.enter()
  119. // let locationAssets = Array(result.locations.values.flatMap { $0 })
  120. // self.calculateAssetsSize(locationAssets) { sizeInfo in
  121. // result.locationsSize = sizeInfo
  122. // sizeGroup.leave()
  123. // }
  124. // 计算人物照片大小
  125. sizeGroup.enter()
  126. let peopleAssets = Array(result.people.values.flatMap { $0 })
  127. self.calculateAssetsSize(peopleAssets) { sizeInfo in
  128. result.peopleSize = sizeInfo
  129. sizeGroup.leave()
  130. }
  131. // 所有大小计算完成后回调
  132. sizeGroup.notify(queue: .main) {
  133. progressHandler("分类完成", 1.0)
  134. completion(result)
  135. }
  136. }
  137. }
  138. }
  139. // 添加内存清理辅助方法
  140. private func cleanupMemory() {
  141. // 清理图像缓存
  142. URLCache.shared.removeAllCachedResponses()
  143. // 强制进行一次垃圾回收
  144. autoreleasepool {
  145. let _ = [String](repeating: "temp", count: 1)
  146. }
  147. #if os(iOS)
  148. // 发送低内存警告
  149. UIApplication.shared.perform(Selector(("_performMemoryWarning")))
  150. #endif
  151. }
  152. func detectSimilarPhotos(
  153. assets: PHFetchResult<PHAsset>,
  154. progressHandler: @escaping (String, Float) -> Void,
  155. completion: @escaping ([[PHAsset]]) -> Void
  156. ) {
  157. var similarGroups: [[PHAsset]] = []
  158. let group = DispatchGroup()
  159. if #available(iOS 13.0, *) {
  160. var imageFeatures: [(asset: PHAsset, feature: VNFeaturePrintObservation)] = []
  161. // 创建处理队列
  162. let processingQueue = DispatchQueue(label: "com.app.similarPhotos", qos: .userInitiated)
  163. let semaphore = DispatchSemaphore(value: 4) // 增加并发数以提高效率
  164. // 1. 提取所有图片的特征
  165. let totalAssets = assets.count
  166. var processedAssets = 0
  167. progressHandler("正在加载照片...", 0.0)
  168. for i in 0..<assets.count {
  169. let asset = assets[i]
  170. group.enter()
  171. semaphore.wait()
  172. let options = PHImageRequestOptions()
  173. options.deliveryMode = .fastFormat // 使用快速模式
  174. options.isSynchronous = false
  175. options.resizeMode = .fast
  176. DispatchQueue.global(qos: .background).async {
  177. PHImageManager.default().requestImage(
  178. for: asset,
  179. targetSize: CGSize(width: 128, height: 128), // 降低分辨率
  180. contentMode: .aspectFit,
  181. options: options
  182. ) { image, _ in
  183. defer {
  184. semaphore.signal()
  185. }
  186. guard let image = image,
  187. let cgImage = image.cgImage else {
  188. group.leave()
  189. return
  190. }
  191. processingQueue.async {
  192. do {
  193. let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
  194. let request = VNGenerateImageFeaturePrintRequest()
  195. try requestHandler.perform([request])
  196. if let result = request.results?.first as? VNFeaturePrintObservation {
  197. imageFeatures.append((asset, result))
  198. // 更新特征提取进度
  199. processedAssets += 1
  200. let progress = Float(processedAssets) / Float(totalAssets)
  201. progressHandler("正在提取特征...", progress * 0.6)
  202. }
  203. } catch {
  204. print("特征提取失败: \(error)")
  205. }
  206. group.leave()
  207. }
  208. }
  209. }
  210. }
  211. group.notify(queue: processingQueue) {
  212. progressHandler("正在比较相似度...", 0.6)
  213. // 近似度
  214. let similarityThreshold: Float = 0.7
  215. var similarGroups: [[PHAsset]] = []
  216. // 使用并行处理来加速比较
  217. let processingGroup = DispatchGroup()
  218. let processingQueue = DispatchQueue(label: "com.yourapp.similarity.processing", attributes: .concurrent)
  219. let resultsQueue = DispatchQueue(label: "com.yourapp.similarity.results")
  220. let semaphore = DispatchSemaphore(value: 4) // 减少并发数量
  221. // 创建一个线程安全的数据结构来存储结果
  222. var processedIndices = Atomic<Set<Int>>(Set<Int>())
  223. var groupResults = Atomic<[Int: [PHAsset]]>([:])
  224. // 分批处理,每批处理一部分数据
  225. let batchSize = min(50, imageFeatures.count)
  226. // 修复 Float 转换错误
  227. let batchCount = Float(imageFeatures.count) / Float(batchSize)
  228. let batches = batchCount.isFinite ? Int(ceil(batchCount)) : 1
  229. for batchIndex in 0..<batches {
  230. let startIndex = batchIndex * batchSize
  231. let endIndex = min(startIndex + batchSize, imageFeatures.count)
  232. for i in startIndex..<endIndex {
  233. // 检查是否已处理
  234. if processedIndices.value.contains(i) { continue }
  235. semaphore.wait()
  236. processingGroup.enter()
  237. processingQueue.async {
  238. // 再次检查,因为可能在等待期间被其他线程处理
  239. if processedIndices.value.contains(i) {
  240. semaphore.signal()
  241. processingGroup.leave()
  242. return
  243. }
  244. var similarAssets: [PHAsset] = [imageFeatures[i].asset]
  245. processedIndices.mutate { $0.insert(i) }
  246. for j in (i + 1)..<imageFeatures.count {
  247. // 检查是否已处理
  248. if processedIndices.value.contains(j) { continue }
  249. do {
  250. var distance: Float = 0
  251. try imageFeatures[i].feature.computeDistance(&distance, to: imageFeatures[j].feature)
  252. // 检查距离值是否有效
  253. if distance.isNaN || distance.isInfinite {
  254. print("警告: 检测到无效的距离值")
  255. continue
  256. }
  257. // 确保距离在有效范围内
  258. distance = max(0, min(1, distance))
  259. let similarity = 1 - distance
  260. if similarity >= similarityThreshold {
  261. similarAssets.append(imageFeatures[j].asset)
  262. processedIndices.mutate { $0.insert(j) }
  263. }
  264. } catch {
  265. print("相似度计算失败: \(error)")
  266. }
  267. }
  268. // 只保存有多个相似图像的组
  269. if similarAssets.count > 1 {
  270. resultsQueue.async {
  271. groupResults.mutate { $0[i] = similarAssets }
  272. }
  273. }
  274. // 更新进度 - 添加安全检查
  275. if imageFeatures.count > 0 {
  276. let processedCount = Float(processedIndices.value.count)
  277. let totalCount = Float(imageFeatures.count)
  278. // 确保进度值有效
  279. var progress: Float = 0
  280. if processedCount.isFinite && totalCount.isFinite && totalCount > 0 {
  281. progress = processedCount / totalCount
  282. // 限制进度范围
  283. progress = max(0, min(1, progress))
  284. }
  285. DispatchQueue.main.async {
  286. progressHandler("正在比较相似度...", 0.6 + progress * 0.4)
  287. }
  288. }
  289. semaphore.signal()
  290. processingGroup.leave()
  291. }
  292. }
  293. }
  294. processingGroup.wait()
  295. // 整理结果
  296. similarGroups = Array(groupResults.value.values)
  297. // 按照照片数量降序排序
  298. similarGroups.sort { $0.count > $1.count }
  299. DispatchQueue.main.async {
  300. completion(similarGroups)
  301. }
  302. }
  303. }
  304. }
  305. func classifyByLocation(assets: PHFetchResult<PHAsset>,
  306. completion: @escaping ([String: [PHAsset]]) -> Void) {
  307. var locationGroups: [String: [PHAsset]] = [:]
  308. let group = DispatchGroup()
  309. let geocodeQueue = DispatchQueue(label: "com.app.geocoding")
  310. let semaphore = DispatchSemaphore(value: 10) // 限制并发请求数
  311. assets.enumerateObjects { asset, _, _ in
  312. if let location = asset.location {
  313. group.enter()
  314. semaphore.wait()
  315. geocodeQueue.async {
  316. let geocoder = CLGeocoder()
  317. geocoder.reverseGeocodeLocation(location) { placemarks, error in
  318. defer {
  319. semaphore.signal()
  320. group.leave()
  321. }
  322. if let placemark = placemarks?.first {
  323. let locationName = self.formatLocationName(placemark)
  324. DispatchQueue.main.async {
  325. if locationGroups[locationName] == nil {
  326. locationGroups[locationName] = []
  327. }
  328. locationGroups[locationName]?.append(asset)
  329. }
  330. }
  331. }
  332. }
  333. }
  334. }
  335. // 等待所有地理编码完成后回调
  336. group.notify(queue: .main) {
  337. completion(locationGroups)
  338. }
  339. }
  340. // 格式化地点名称(只返回城市名)
  341. func formatLocationName(_ placemark: CLPlacemark) -> String {
  342. if let city = placemark.locality {
  343. return city
  344. } else if let area = placemark.administrativeArea {
  345. return area
  346. }
  347. return "其他"
  348. }
  349. func classifyByPeople(assets: PHFetchResult<PHAsset>,
  350. completion: @escaping ([String: [PHAsset]]) -> Void) {
  351. // 创建结果字典
  352. var peopleGroups: [String: [PHAsset]] = [:]
  353. peopleGroups["包含人脸的照片"] = []
  354. // 使用主队列确保安全完成
  355. let mainCompletion: ([String: [PHAsset]]) -> Void = { result in
  356. DispatchQueue.main.async {
  357. completion(result)
  358. }
  359. }
  360. // 限制处理的照片数量,防止内存过载
  361. let totalCount = min(500, assets.count)
  362. if totalCount == 0 {
  363. mainCompletion(peopleGroups)
  364. return
  365. }
  366. // 创建专用队列
  367. let processingQueue = DispatchQueue(label: "com.app.peopleDetection", qos: .userInitiated, attributes: .concurrent)
  368. let resultQueue = DispatchQueue(label: "com.app.peopleResult", qos: .userInitiated)
  369. // 使用NSLock替代原子操作,更安全
  370. let resultLock = NSLock()
  371. // 创建进度追踪
  372. let processedCount = Atomic<Int>(0)
  373. // 分批处理,每批处理一部分数据
  374. let batchSize = 20
  375. let batches = Int(ceil(Float(totalCount) / Float(batchSize)))
  376. // 创建一个组来等待所有操作完成
  377. let group = DispatchGroup()
  378. // 创建一个Vision请求处理器
  379. let faceDetectionRequest = VNDetectFaceRectanglesRequest()
  380. // 防止过早释放
  381. var strongSelf: AnyObject? = self
  382. for batchIndex in 0..<batches {
  383. let startIndex = batchIndex * batchSize
  384. let endIndex = min(startIndex + batchSize, totalCount)
  385. // 每批处理前进入组
  386. group.enter()
  387. // 使用延迟减轻主线程压力
  388. DispatchQueue.global(qos: .userInitiated).asyncAfter(deadline: .now() + Double(batchIndex) * 0.3) { [weak self] in
  389. guard let self = self else {
  390. group.leave()
  391. return
  392. }
  393. // 创建批次内的处理组
  394. let batchGroup = DispatchGroup()
  395. // 限制并发数
  396. let batchSemaphore = DispatchSemaphore(value: 2)
  397. for i in startIndex..<endIndex {
  398. batchGroup.enter()
  399. batchSemaphore.wait()
  400. processingQueue.async {
  401. // 使用自动释放池减少内存占用
  402. autoreleasepool {
  403. let asset = assets[i]
  404. // 降低处理图片的分辨率
  405. let options = PHImageRequestOptions()
  406. options.deliveryMode = .fastFormat
  407. options.isSynchronous = false
  408. options.resizeMode = .fast
  409. options.isNetworkAccessAllowed = false
  410. PHImageManager.default().requestImage(
  411. for: asset,
  412. targetSize: CGSize(width: 120, height: 120),
  413. contentMode: .aspectFit,
  414. options: options
  415. ) { image, info in
  416. defer {
  417. batchSemaphore.signal()
  418. batchGroup.leave()
  419. }
  420. // 检查是否是降级的图像
  421. if let degraded = info?[PHImageResultIsDegradedKey] as? Bool, degraded {
  422. return
  423. }
  424. guard let image = image, let cgImage = image.cgImage else {
  425. return
  426. }
  427. // 使用简化的人脸检测
  428. let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
  429. do {
  430. try handler.perform([faceDetectionRequest])
  431. if let results = faceDetectionRequest.results, !results.isEmpty {
  432. // 检测到人脸,添加到数组
  433. resultLock.lock()
  434. peopleGroups["包含人脸的照片"]?.append(asset)
  435. resultLock.unlock()
  436. }
  437. } catch {
  438. print("人脸检测失败: \(error)")
  439. }
  440. // 更新进度
  441. processedCount.mutate { $0 += 1 }
  442. }
  443. }
  444. }
  445. }
  446. // 等待批次内所有处理完成
  447. batchGroup.wait()
  448. // 每批处理完后清理内存
  449. self.cleanupMemory()
  450. // 批次完成
  451. group.leave()
  452. }
  453. }
  454. // 设置超时保护
  455. let timeoutWorkItem = DispatchWorkItem {
  456. print("人脸检测超时,返回当前结果")
  457. mainCompletion(peopleGroups)
  458. strongSelf = nil
  459. }
  460. // 30秒后超时
  461. DispatchQueue.global().asyncAfter(deadline: .now() + 30, execute: timeoutWorkItem)
  462. // 等待所有检测完成后更新结果
  463. group.notify(queue: .main) {
  464. // 取消超时
  465. timeoutWorkItem.cancel()
  466. // 最终清理内存
  467. self.cleanupMemory()
  468. // 返回结果
  469. mainCompletion(peopleGroups)
  470. // 释放引用
  471. strongSelf = nil
  472. }
  473. }
  474. // 优化的人脸检测方法
  475. private func optimizedFaceDetection(in image: UIImage, request: VNDetectFaceRectanglesRequest, completion: @escaping (Bool) -> Void) {
  476. guard let cgImage = image.cgImage else {
  477. completion(false)
  478. return
  479. }
  480. // 在后台线程执行检测
  481. DispatchQueue.global(qos: .userInitiated).async {
  482. autoreleasepool {
  483. let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
  484. do {
  485. try handler.perform([request])
  486. let hasFace = request.results?.isEmpty == false
  487. completion(hasFace)
  488. } catch {
  489. print("人脸检测失败: \(error)")
  490. completion(false)
  491. }
  492. }
  493. }
  494. }
  495. // func classifyByPeople(assets: PHFetchResult<PHAsset>,
  496. // completion: @escaping ([String: [PHAsset]]) -> Void) {
  497. // var peopleGroups: [String: [PHAsset]] = [:]
  498. // let group = DispatchGroup()
  499. //
  500. // // 创建专用队列和信号量控制并发
  501. // let processingQueue = DispatchQueue(label: "com.app.peopleDetection", qos: .userInitiated, attributes: .concurrent)
  502. // let resultQueue = DispatchQueue(label: "com.app.peopleResult", qos: .userInitiated)
  503. // let semaphore = DispatchSemaphore(value: 4) // 限制并发数
  504. //
  505. // // 创建进度追踪
  506. // var processedCount = 0
  507. // let totalCount = assets.count
  508. //
  509. // // 分批处理,每批处理一部分数据
  510. // let batchSize = 50
  511. // let batches = Int(ceil(Float(assets.count) / Float(batchSize)))
  512. //
  513. // for batchIndex in 0..<batches {
  514. // let startIndex = batchIndex * batchSize
  515. // let endIndex = min(startIndex + batchSize, assets.count)
  516. //
  517. // // 使用自动释放池减少内存占用
  518. // autoreleasepool {
  519. // for i in startIndex..<endIndex {
  520. // let asset = assets[i]
  521. // group.enter()
  522. // semaphore.wait()
  523. //
  524. // // 降低处理图片的分辨率
  525. // let options = PHImageRequestOptions()
  526. // options.deliveryMode = .fastFormat
  527. // options.isSynchronous = false
  528. // options.resizeMode = .fast
  529. //
  530. // processingQueue.async {
  531. // // 使用自动释放池减少内存占用
  532. // autoreleasepool {
  533. // let _ = PHImageManager.default().requestImage(
  534. // for: asset,
  535. // targetSize: CGSize(width: 128, height: 128), // 降低分辨率
  536. // contentMode: .aspectFit,
  537. // options: options
  538. // ) { image, _ in
  539. // defer {
  540. // semaphore.signal()
  541. // }
  542. //
  543. // guard let image = image else {
  544. // group.leave()
  545. // return
  546. // }
  547. //
  548. // // 使用 Vision 框架检测人脸
  549. // guard let ciImage = CIImage(image: image) else {
  550. // group.leave()
  551. // return
  552. // }
  553. //
  554. // let request = VNDetectFaceRectanglesRequest()
  555. // let handler = VNImageRequestHandler(ciImage: ciImage, options: [:])
  556. //
  557. // do {
  558. // try handler.perform([request])
  559. // if let results = request.results, !results.isEmpty {
  560. // // 检测到人脸,添加到数组
  561. // resultQueue.async {
  562. // if peopleGroups["包含人脸的照片"] == nil {
  563. // peopleGroups["包含人脸的照片"] = []
  564. // }
  565. // peopleGroups["包含人脸的照片"]?.append(asset)
  566. // }
  567. // }
  568. // } catch {
  569. // print("人脸检测失败: \(error)")
  570. // }
  571. //
  572. // // 更新进度
  573. // resultQueue.async {
  574. // processedCount += 1
  575. // let progress = Float(processedCount) / Float(totalCount)
  576. // if processedCount % 100 == 0 || processedCount == totalCount {
  577. // DispatchQueue.main.async {
  578. // print("人脸检测进度: \(Int(progress * 100))%")
  579. // }
  580. // }
  581. // }
  582. //
  583. // group.leave()
  584. // }
  585. // }
  586. // }
  587. // }
  588. // }
  589. //
  590. // // 每批处理完后清理内存
  591. // cleanupMemory()
  592. // }
  593. //
  594. // // 等待所有检测完成后更新结果
  595. // group.notify(queue: .main) {
  596. // completion(peopleGroups)
  597. // }
  598. // }
  599. // // 添加内存清理方法(如果还没有)
  600. // private func cleanupMemory() {
  601. // // 强制清理内存
  602. // autoreleasepool {
  603. // // 触发内存警告,促使系统回收内存
  604. // UIApplication.shared.performSelector(onMainThread: #selector(UIApplication.beginIgnoringInteractionEvents), with: nil, waitUntilDone: true)
  605. // UIApplication.shared.performSelector(onMainThread: #selector(UIApplication.endIgnoringInteractionEvents), with: nil, waitUntilDone: true)
  606. // }
  607. // }
  608. // 按人物分类
  609. // func classifyByPeople(assets: PHFetchResult<PHAsset>,
  610. // completion: @escaping ([String: [PHAsset]]) -> Void) {
  611. // var peopleGroups: [String: [PHAsset]] = [:]
  612. // let group = DispatchGroup()
  613. //
  614. // DispatchQueue.global(qos: .background).async {
  615. // // 创建一个数组来存储检测到人脸的照片
  616. // var facesArray: [PHAsset] = []
  617. //
  618. // // 遍历所有照片
  619. // assets.enumerateObjects { asset, _, _ in
  620. // group.enter()
  621. //
  622. // // 获取照片的缩略图进行人脸检测
  623. // let options = PHImageRequestOptions()
  624. // options.isSynchronous = false
  625. // options.deliveryMode = .fastFormat
  626. //
  627. // PHImageManager.default().requestImage(
  628. // for: asset,
  629. // targetSize: CGSize(width: 128, height: 128), // 使用较小的尺寸提高性能
  630. // contentMode: .aspectFit,
  631. // options: options
  632. // ) { image, _ in
  633. // guard let image = image else {
  634. // group.leave()
  635. // return
  636. // }
  637. //
  638. // // 使用 Vision 框架检测人脸
  639. // guard let ciImage = CIImage(image: image) else {
  640. // group.leave()
  641. // return
  642. // }
  643. //
  644. // let request = VNDetectFaceRectanglesRequest()
  645. // let handler = VNImageRequestHandler(ciImage: ciImage)
  646. //
  647. // do {
  648. // try handler.perform([request])
  649. // if let results = request.results, !results.isEmpty {
  650. // // 检测到人脸,添加到数组
  651. // DispatchQueue.main.async {
  652. // facesArray.append(asset)
  653. // }
  654. // }
  655. // } catch {
  656. // print("人脸检测失败: \(error)")
  657. // }
  658. //
  659. // group.leave()
  660. // }
  661. // }
  662. //
  663. // // 等待所有检测完成后更新结果
  664. // group.notify(queue: .main) {
  665. // if !facesArray.isEmpty {
  666. // peopleGroups["包含人脸的照片"] = facesArray
  667. // }
  668. // completion(peopleGroups)
  669. // }
  670. // }
  671. // }
  672. // 识别截图
  673. func fetchScreenshots(from assets: PHFetchResult<PHAsset>,
  674. completion: @escaping ([PHAsset]) -> Void) {
  675. var screenshots: [PHAsset] = []
  676. // 获取系统的截图智能相册
  677. let screenshotAlbums = PHAssetCollection.fetchAssetCollections(
  678. with: .smartAlbum,
  679. subtype: .smartAlbumScreenshots,
  680. options: nil
  681. )
  682. // 从截图相册中获取所有截图
  683. screenshotAlbums.enumerateObjects { collection, _, _ in
  684. let fetchOptions = PHFetchOptions()
  685. let screenshotAssets = PHAsset.fetchAssets(in: collection, options: fetchOptions)
  686. screenshotAssets.enumerateObjects { asset, _, _ in
  687. screenshots.append(asset)
  688. }
  689. }
  690. completion(screenshots)
  691. }
  692. // 修改辅助方法以接受 PHFetchResult<PHAsset>
  693. // private func detectScreenshots(assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  694. // let processingQueue = DispatchQueue(label: "com.yourapp.screenshots.processing", attributes: .concurrent)
  695. // let resultQueue = DispatchQueue(label: "com.yourapp.screenshots.results")
  696. // let group = DispatchGroup()
  697. // let semaphore = DispatchSemaphore(value: 4) // 限制并发数
  698. //
  699. // let screenshots = Atomic<[PHAsset]>([])
  700. //
  701. // // 分批处理
  702. // let totalCount = assets.count
  703. // let batchSize = 50
  704. // let batches = Int(ceil(Float(totalCount) / Float(batchSize)))
  705. //
  706. // for batchIndex in 0..<batches {
  707. // let startIndex = batchIndex * batchSize
  708. // let endIndex = min(startIndex + batchSize, totalCount)
  709. //
  710. // processingQueue.async {
  711. // autoreleasepool {
  712. // for i in startIndex..<endIndex {
  713. // semaphore.wait()
  714. // group.enter()
  715. //
  716. // let asset = assets.object(at: i)
  717. //
  718. // // 检测是否为截图的逻辑
  719. // // ...
  720. //
  721. // // 模拟检测逻辑
  722. // let isScreenshot = asset.pixelWidth == asset.pixelHeight * 16 / 9 ||
  723. // asset.pixelHeight == asset.pixelWidth * 16 / 9
  724. //
  725. // if isScreenshot {
  726. // resultQueue.async {
  727. // screenshots.mutate { $0.append(asset) }
  728. // }
  729. // }
  730. //
  731. // semaphore.signal()
  732. // group.leave()
  733. // }
  734. // }
  735. // }
  736. // }
  737. //
  738. // group.notify(queue: .main) {
  739. // completion(screenshots.value)
  740. // }
  741. // }
  742. // ... existing code ...
  743. func detectBlurryPhotos(from assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  744. var blurryPhotos: [PHAsset] = []
  745. let group = DispatchGroup()
  746. let processingQueue = DispatchQueue(label: "com.app.blurryDetection", attributes: .concurrent)
  747. let resultQueue = DispatchQueue(label: "com.app.blurryResult")
  748. let semaphore = DispatchSemaphore(value: 8) // 增加并发数
  749. // 创建进度追踪
  750. var processedCount = 0
  751. let totalCount = assets.count
  752. // 分批处理,每批处理一部分数据
  753. let batchSize = 50
  754. let batches = Int(ceil(Float(assets.count) / Float(batchSize)))
  755. for batchIndex in 0..<batches {
  756. let startIndex = batchIndex * batchSize
  757. let endIndex = min(startIndex + batchSize, assets.count)
  758. autoreleasepool {
  759. for i in startIndex..<endIndex {
  760. let asset = assets[i]
  761. group.enter()
  762. semaphore.wait()
  763. let options = PHImageRequestOptions()
  764. options.deliveryMode = .fastFormat // 使用快速模式
  765. options.isSynchronous = false
  766. options.resizeMode = .fast
  767. // 进一步降低处理图片的分辨率
  768. PHImageManager.default().requestImage(
  769. for: asset,
  770. targetSize: CGSize(width: 64, height: 64), // 降低分辨率到64x64
  771. contentMode: .aspectFit,
  772. options: options
  773. ) { image, _ in
  774. defer {
  775. semaphore.signal()
  776. }
  777. guard let image = image else {
  778. group.leave()
  779. return
  780. }
  781. processingQueue.async {
  782. // 使用更高效的模糊检测
  783. let isBlurry = self.fastBlurCheck(image)
  784. if isBlurry {
  785. resultQueue.async {
  786. blurryPhotos.append(asset)
  787. }
  788. }
  789. // 更新进度
  790. resultQueue.async {
  791. processedCount += 1
  792. let progress = Float(processedCount) / Float(totalCount)
  793. if processedCount % 100 == 0 || processedCount == totalCount {
  794. DispatchQueue.main.async {
  795. print("模糊检测进度: \(Int(progress * 100))%")
  796. }
  797. }
  798. }
  799. group.leave()
  800. }
  801. }
  802. }
  803. }
  804. // 每批处理完后清理内存
  805. cleanupMemory()
  806. }
  807. group.notify(queue: .main) {
  808. completion(blurryPhotos)
  809. }
  810. }
  811. // 更高效的模糊检测方法
  812. private func fastBlurCheck(_ image: UIImage) -> Bool {
  813. guard let cgImage = image.cgImage else { return false }
  814. // 使用更小的采样区域
  815. let width = cgImage.width
  816. let height = cgImage.height
  817. let pixelStride = 4 // 改名为pixelStride,避免与函数名冲突
  818. // 提前检查图像尺寸是否合法
  819. guard width > (2 * pixelStride), height > (2 * pixelStride) else {
  820. return false
  821. }
  822. // 使用vImage进行快速处理
  823. var buffer = [UInt8](repeating: 0, count: width * height)
  824. let colorSpace = CGColorSpaceCreateDeviceGray()
  825. guard let context = CGContext(
  826. data: &buffer,
  827. width: width,
  828. height: height,
  829. bitsPerComponent: 8,
  830. bytesPerRow: width,
  831. space: colorSpace,
  832. bitmapInfo: CGImageAlphaInfo.none.rawValue
  833. ) else {
  834. return false
  835. }
  836. context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))
  837. // 使用拉普拉斯算子的简化版本
  838. var score: Double = 0
  839. var sampledPixels = 0
  840. // 只采样图像的一部分区域
  841. let sampleRows = 10
  842. let sampleCols = 10
  843. // 计算步长
  844. let rowStep = max(1, height / sampleRows)
  845. let colStep = max(1, width / sampleCols)
  846. // 使用Swift的stride函数
  847. for y in Swift.stride(from: pixelStride, to: height - pixelStride, by: rowStep) {
  848. for x in Swift.stride(from: pixelStride, to: width - pixelStride, by: colStep) {
  849. let current = Int(buffer[y * width + x])
  850. let left = Int(buffer[y * width + (x - pixelStride)])
  851. let right = Int(buffer[y * width + (x + pixelStride)])
  852. let top = Int(buffer[(y - pixelStride) * width + x])
  853. let bottom = Int(buffer[(y + pixelStride) * width + x])
  854. // 简化的边缘检测
  855. let dx = abs(left - right)
  856. let dy = abs(top - bottom)
  857. score += Double(max(dx, dy))
  858. sampledPixels += 1
  859. }
  860. }
  861. // 避免除以零
  862. guard sampledPixels > 0 else { return false }
  863. // 归一化分数
  864. let normalizedScore = score / Double(sampledPixels)
  865. // 调整阈值
  866. let threshold = 15.0
  867. return normalizedScore < threshold
  868. }
  869. // ... existing code ...
  870. // func detectBlurryPhotos(from assets: PHFetchResult<PHAsset>, completion: @escaping ([PHAsset]) -> Void) {
  871. // var blurryPhotos: [PHAsset] = []
  872. // let group = DispatchGroup()
  873. // let processingQueue = DispatchQueue(label: "com.app.blurryDetection", attributes: .concurrent)
  874. // let resultQueue = DispatchQueue(label: "com.app.blurryResult")
  875. // let semaphore = DispatchSemaphore(value: 5) // 增加并发数
  876. //
  877. // // 创建进度追踪
  878. // var processedCount = 0
  879. // let totalCount = assets.count
  880. //
  881. // for i in 0..<assets.count {
  882. // let asset = assets[i]
  883. // group.enter()
  884. // semaphore.wait()
  885. //
  886. // let options = PHImageRequestOptions()
  887. // options.deliveryMode = .fastFormat // 使用快速模式
  888. // options.isSynchronous = false
  889. // options.resizeMode = .fast
  890. //
  891. // // 降低处理图片的分辨率
  892. // PHImageManager.default().requestImage(
  893. // for: asset,
  894. // targetSize: CGSize(width: 128, height: 128), // 降低分辨率
  895. // contentMode: .aspectFit,
  896. // options: options
  897. // ) { image, _ in
  898. // defer {
  899. // semaphore.signal()
  900. // }
  901. //
  902. // guard let image = image,
  903. // let cgImage = image.cgImage else {
  904. // group.leave()
  905. // return
  906. // }
  907. //
  908. // processingQueue.async {
  909. // // 快速模糊检测
  910. // let isBlurry = self.quickBlurCheck(cgImage)
  911. //
  912. // if isBlurry {
  913. // resultQueue.async {
  914. // blurryPhotos.append(asset)
  915. // }
  916. // }
  917. //
  918. // // 更新进度
  919. // resultQueue.async {
  920. // processedCount += 1
  921. // let progress = Float(processedCount) / Float(totalCount)
  922. // DispatchQueue.main.async {
  923. // print("模糊检测进度: \(Int(progress * 100))%")
  924. // }
  925. // }
  926. //
  927. // group.leave()
  928. // }
  929. // }
  930. // }
  931. //
  932. // group.notify(queue: .main) {
  933. // completion(blurryPhotos)
  934. // }
  935. // }
  936. //
  937. // // 快速模糊检测方法
  938. // private func quickBlurCheck(_ image: CGImage) -> Bool {
  939. //
  940. // let width = image.width
  941. // let height = image.height
  942. // let stride = 2 // 跳过一些像素以加快速度
  943. //
  944. // // 提前检查图像尺寸是否合法
  945. // guard width > (2 * stride), height > (2 * stride) else {
  946. // return false // 小尺寸图像直接判定为模糊或清晰
  947. // }
  948. //
  949. // var buffer = [UInt8](repeating: 0, count: width * height)
  950. //
  951. // let colorSpace = CGColorSpaceCreateDeviceGray()
  952. // guard let context = CGContext(
  953. // data: &buffer,
  954. // width: width,
  955. // height: height,
  956. // bitsPerComponent: 8,
  957. // bytesPerRow: width,
  958. // space: colorSpace,
  959. // bitmapInfo: CGImageAlphaInfo.none.rawValue
  960. // ) else {
  961. // return false
  962. // }
  963. //
  964. // context.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
  965. //
  966. // // 使用简化的拉普拉斯算子
  967. // var score: Double = 0
  968. //
  969. // for y in stride..<(height-stride) where y % stride == 0 {
  970. // for x in stride..<(width-stride) where x % stride == 0 {
  971. // let current = Int(buffer[y * width + x])
  972. // let left = Int(buffer[y * width + (x - stride)])
  973. // let right = Int(buffer[y * width + (x + stride)])
  974. // let top = Int(buffer[(y - stride) * width + x])
  975. // let bottom = Int(buffer[(y + stride) * width + x])
  976. //
  977. // // 简化的边缘检测
  978. // let dx = abs(left - right)
  979. // let dy = abs(top - bottom)
  980. // score += Double(max(dx, dy))
  981. // }
  982. // }
  983. //
  984. // // 归一化分数
  985. // let normalizedScore = score / Double((width * height) / (stride * stride))
  986. //
  987. // // 调整阈值(可能需要根据实际效果调整)
  988. // let threshold = 20.0
  989. // return normalizedScore < threshold
  990. // }
  991. }
  992. extension ClassifyPhoto {
  993. // 获取资源大小的辅助方法
  994. func getAssetSize(_ asset: PHAsset, completion: @escaping (Int64) -> Void) {
  995. DispatchQueue.global(qos: .background).async {
  996. let resources = PHAssetResource.assetResources(for: asset)
  997. if let resource = resources.first {
  998. var size: Int64 = 0
  999. if let unsignedInt64 = resource.value(forKey: "fileSize") as? CLong {
  1000. size = Int64(unsignedInt64)
  1001. }
  1002. DispatchQueue.main.async {
  1003. completion(size)
  1004. }
  1005. } else {
  1006. DispatchQueue.main.async {
  1007. completion(0)
  1008. }
  1009. }
  1010. }
  1011. }
  1012. // 计算资产组的总大小
  1013. func calculateAssetsSize(_ assets: [PHAsset], completion: @escaping (PhotoSizeInfo) -> Void) {
  1014. print("正在计算图片组容量大小")
  1015. let group = DispatchGroup()
  1016. var totalSize: Int64 = 0
  1017. for asset in assets {
  1018. group.enter()
  1019. getAssetSize(asset) { size in
  1020. totalSize += size
  1021. group.leave()
  1022. }
  1023. }
  1024. group.notify(queue: .main) {
  1025. completion(PhotoSizeInfo(totalSize: totalSize, count: assets.count))
  1026. }
  1027. }
  1028. }
  1029. extension ClassifyPhoto {
  1030. // 添加一个处理 P3 色彩空间图像的辅助方法
  1031. private func processImageWithSafeColorSpace(_ image: UIImage) -> UIImage? {
  1032. autoreleasepool {
  1033. guard let cgImage = image.cgImage else { return image }
  1034. // 检查色彩空间
  1035. if let colorSpace = cgImage.colorSpace,
  1036. (colorSpace.name as String?) == CGColorSpace.displayP3 as String {
  1037. // 转换为 sRGB 色彩空间
  1038. let sRGBColorSpace = CGColorSpaceCreateDeviceRGB()
  1039. if let context = CGContext(
  1040. data: nil,
  1041. width: cgImage.width,
  1042. height: cgImage.height,
  1043. bitsPerComponent: 8,
  1044. bytesPerRow: 0,
  1045. space: sRGBColorSpace,
  1046. bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
  1047. ) {
  1048. context.draw(cgImage, in: CGRect(x: 0, y: 0, width: cgImage.width, height: cgImage.height))
  1049. if let convertedImage = context.makeImage() {
  1050. return UIImage(cgImage: convertedImage, scale: image.scale, orientation: image.imageOrientation)
  1051. }
  1052. }
  1053. }
  1054. return image
  1055. }
  1056. }
  1057. // 修改图像请求方法,添加色彩空间处理
  1058. private func requestImageWithSafeProcessing(
  1059. for asset: PHAsset,
  1060. targetSize: CGSize,
  1061. contentMode: PHImageContentMode,
  1062. options: PHImageRequestOptions?,
  1063. completion: @escaping (UIImage?) -> Void
  1064. ) {
  1065. PHImageManager.default().requestImage(
  1066. for: asset,
  1067. targetSize: targetSize,
  1068. contentMode: contentMode,
  1069. options: options
  1070. ) { image, info in
  1071. guard let image = image else {
  1072. completion(nil)
  1073. return
  1074. }
  1075. // 处理可能的 P3 色彩空间图像
  1076. DispatchQueue.global(qos: .userInitiated).async {
  1077. let processedImage = self.processImageWithSafeColorSpace(image)
  1078. DispatchQueue.main.async {
  1079. completion(processedImage)
  1080. }
  1081. }
  1082. }
  1083. }
  1084. }
  1085. class Atomic<T> {
  1086. private var value_: T
  1087. private let lock = NSLock()
  1088. init(_ value: T) {
  1089. self.value_ = value
  1090. }
  1091. var value: T {
  1092. lock.lock()
  1093. defer { lock.unlock() }
  1094. return value_
  1095. }
  1096. func mutate(_ mutation: (inout T) -> Void) {
  1097. lock.lock()
  1098. defer { lock.unlock() }
  1099. mutation(&value_)
  1100. }
  1101. }