Explorar el Código

调整 live 到可设置壁纸状态

100Years hace 3 meses
padre
commit
7b6706be0c

+ 5 - 0
Bridging-Header.h

@@ -0,0 +1,5 @@
+//
+//  Use this file to import your target's public headers that you would like to expose to Swift.
+//
+
+#import "LivePhotoUtil.h"

+ 43 - 41
TSLiveWallpaper.xcodeproj/project.pbxproj

@@ -80,15 +80,16 @@
 		A84C239C2D1E3A4300B61B55 /* GPVideoClipperView.swift in Sources */ = {isa = PBXBuildFile; fileRef = A84C23972D1E3A4300B61B55 /* GPVideoClipperView.swift */; };
 		A84C239D2D1E3A4300B61B55 /* GPVideoConfigMaker.swift in Sources */ = {isa = PBXBuildFile; fileRef = A84C23952D1E3A4300B61B55 /* GPVideoConfigMaker.swift */; };
 		A84C239F2D1E88CD00B61B55 /* TSFileManagerTool.swift in Sources */ = {isa = PBXBuildFile; fileRef = A84C239E2D1E88C500B61B55 /* TSFileManagerTool.swift */; };
-		A858EE172D1CF49B004B680F /* LivePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = A858EE162D1CF49B004B680F /* LivePhoto.swift */; };
 		A8C4C01D2D2397B9003C46FC /* UIViewController+Ex.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C01C2D2397B4003C46FC /* UIViewController+Ex.swift */; };
-		A8C4C0272D23CD88003C46FC /* AVAssetExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0202D23CD88003C46FC /* AVAssetExtension.swift */; };
-		A8C4C0282D23CD88003C46FC /* LivePhotoCreater.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0242D23CD88003C46FC /* LivePhotoCreater.swift */; };
-		A8C4C0292D23CD88003C46FC /* VideoRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0252D23CD88003C46FC /* VideoRecorder.swift */; };
-		A8C4C02A2D23CD88003C46FC /* Converter4Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0222D23CD88003C46FC /* Converter4Video.swift */; };
-		A8C4C02B2D23CD88003C46FC /* Converter4Image.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0212D23CD88003C46FC /* Converter4Image.swift */; };
-		A8C4C02D2D23D261003C46FC /* metadata.mov in Resources */ = {isa = PBXBuildFile; fileRef = A8C4C02C2D23D261003C46FC /* metadata.mov */; };
-		A8C4C02F2D23DCCC003C46FC /* 1.mov in Resources */ = {isa = PBXBuildFile; fileRef = A8C4C02E2D23DCCC003C46FC /* 1.mov */; };
+		A8C4C0982D242154003C46FC /* LivePhoto.swift in Sources */ = {isa = PBXBuildFile; fileRef = A858EE162D1CF49B004B680F /* LivePhoto.swift */; };
+		A8C4C0A22D24218A003C46FC /* origin.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = A8C4C09F2D24218A003C46FC /* origin.mp4 */; };
+		A8C4C0A32D24218A003C46FC /* 1.mov in Resources */ = {isa = PBXBuildFile; fileRef = A8C4C0A02D24218A003C46FC /* 1.mov */; };
+		A8C4C0A42D24218A003C46FC /* metadata.mov in Resources */ = {isa = PBXBuildFile; fileRef = A8C4C09E2D24218A003C46FC /* metadata.mov */; };
+		A8C4C0A52D24218A003C46FC /* Converter4Video.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C09B2D24218A003C46FC /* Converter4Video.swift */; };
+		A8C4C0A62D24218A003C46FC /* AVAssetExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0992D24218A003C46FC /* AVAssetExtension.swift */; };
+		A8C4C0A72D24218A003C46FC /* LivePhotoUtil.m in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C09D2D24218A003C46FC /* LivePhotoUtil.m */; };
+		A8C4C0A82D24218A003C46FC /* Converter4Image.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C09A2D24218A003C46FC /* Converter4Image.swift */; };
+		A8C4C0AB2D2427E7003C46FC /* LivePhotoConverter.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8C4C0AA2D2427D3003C46FC /* LivePhotoConverter.swift */; };
 		A8E56BF62D1520EC003C54AF /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A8E56BEC2D1520EC003C54AF /* AppDelegate.swift */; };
 		A8E56BF92D1520EC003C54AF /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A8E56BED2D1520EC003C54AF /* Assets.xcassets */; };
 		A8E56BFB2D1520EC003C54AF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A8E56BF02D1520EC003C54AF /* LaunchScreen.storyboard */; };
@@ -175,13 +176,16 @@
 		A84C239E2D1E88C500B61B55 /* TSFileManagerTool.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TSFileManagerTool.swift; sourceTree = "<group>"; };
 		A858EE162D1CF49B004B680F /* LivePhoto.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivePhoto.swift; sourceTree = "<group>"; };
 		A8C4C01C2D2397B4003C46FC /* UIViewController+Ex.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIViewController+Ex.swift"; sourceTree = "<group>"; };
-		A8C4C0202D23CD88003C46FC /* AVAssetExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAssetExtension.swift; sourceTree = "<group>"; };
-		A8C4C0212D23CD88003C46FC /* Converter4Image.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Converter4Image.swift; sourceTree = "<group>"; };
-		A8C4C0222D23CD88003C46FC /* Converter4Video.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Converter4Video.swift; sourceTree = "<group>"; };
-		A8C4C0242D23CD88003C46FC /* LivePhotoCreater.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivePhotoCreater.swift; sourceTree = "<group>"; };
-		A8C4C0252D23CD88003C46FC /* VideoRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoRecorder.swift; sourceTree = "<group>"; };
-		A8C4C02C2D23D261003C46FC /* metadata.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = metadata.mov; sourceTree = "<group>"; };
-		A8C4C02E2D23DCCC003C46FC /* 1.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = 1.mov; sourceTree = "<group>"; };
+		A8C4C0992D24218A003C46FC /* AVAssetExtension.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAssetExtension.swift; sourceTree = "<group>"; };
+		A8C4C09A2D24218A003C46FC /* Converter4Image.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Converter4Image.swift; sourceTree = "<group>"; };
+		A8C4C09B2D24218A003C46FC /* Converter4Video.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Converter4Video.swift; sourceTree = "<group>"; };
+		A8C4C09C2D24218A003C46FC /* LivePhotoUtil.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = LivePhotoUtil.h; sourceTree = "<group>"; };
+		A8C4C09D2D24218A003C46FC /* LivePhotoUtil.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = LivePhotoUtil.m; sourceTree = "<group>"; };
+		A8C4C09E2D24218A003C46FC /* metadata.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = metadata.mov; sourceTree = "<group>"; };
+		A8C4C09F2D24218A003C46FC /* origin.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = origin.mp4; sourceTree = "<group>"; };
+		A8C4C0A02D24218A003C46FC /* 1.mov */ = {isa = PBXFileReference; lastKnownFileType = video.quicktime; path = 1.mov; sourceTree = "<group>"; };
+		A8C4C0A92D242204003C46FC /* Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Bridging-Header.h"; sourceTree = "<group>"; };
+		A8C4C0AA2D2427D3003C46FC /* LivePhotoConverter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivePhotoConverter.swift; sourceTree = "<group>"; };
 		A8E56BD42D1520DD003C54AF /* TSLiveWallpaper.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = TSLiveWallpaper.app; sourceTree = BUILT_PRODUCTS_DIR; };
 		A8E56BEC2D1520EC003C54AF /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
 		A8E56BED2D1520EC003C54AF /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
@@ -504,7 +508,6 @@
 		A839463D2D1D6F9400ABFF0D /* EasyVC */ = {
 			isa = PBXGroup;
 			children = (
-				A8F778B62D1BE9A100BF55D5 /* TSLiveWallpaperBrowseVC.swift */,
 				A839463E2D1D6FB600ABFF0D /* TSLiveWallpaperTutorialsVC.swift */,
 				A83946422D1D701300ABFF0D /* TSLiveWallpaperCopyrightVC.swift */,
 			);
@@ -542,34 +545,28 @@
 		A858EE182D1CF635004B680F /* ThirdParty */ = {
 			isa = PBXGroup;
 			children = (
-				A8C4C0262D23CD88003C46FC /* LivePhoto */,
+				A8C4C0A12D24218A003C46FC /* Util */,
 				A858EE162D1CF49B004B680F /* LivePhoto.swift */,
 			);
 			path = ThirdParty;
 			sourceTree = "<group>";
 		};
-		A8C4C0232D23CD88003C46FC /* Util */ = {
+		A8C4C0A12D24218A003C46FC /* Util */ = {
 			isa = PBXGroup;
 			children = (
-				A8C4C0202D23CD88003C46FC /* AVAssetExtension.swift */,
-				A8C4C0212D23CD88003C46FC /* Converter4Image.swift */,
-				A8C4C0222D23CD88003C46FC /* Converter4Video.swift */,
+				A8C4C0AA2D2427D3003C46FC /* LivePhotoConverter.swift */,
+				A8C4C0992D24218A003C46FC /* AVAssetExtension.swift */,
+				A8C4C09A2D24218A003C46FC /* Converter4Image.swift */,
+				A8C4C09B2D24218A003C46FC /* Converter4Video.swift */,
+				A8C4C09C2D24218A003C46FC /* LivePhotoUtil.h */,
+				A8C4C09D2D24218A003C46FC /* LivePhotoUtil.m */,
+				A8C4C09E2D24218A003C46FC /* metadata.mov */,
+				A8C4C09F2D24218A003C46FC /* origin.mp4 */,
+				A8C4C0A02D24218A003C46FC /* 1.mov */,
 			);
 			path = Util;
 			sourceTree = "<group>";
 		};
-		A8C4C0262D23CD88003C46FC /* LivePhoto */ = {
-			isa = PBXGroup;
-			children = (
-				A8C4C0232D23CD88003C46FC /* Util */,
-				A8C4C0242D23CD88003C46FC /* LivePhotoCreater.swift */,
-				A8C4C02C2D23D261003C46FC /* metadata.mov */,
-				A8C4C02E2D23DCCC003C46FC /* 1.mov */,
-				A8C4C0252D23CD88003C46FC /* VideoRecorder.swift */,
-			);
-			path = LivePhoto;
-			sourceTree = "<group>";
-		};
 		A8E56BCB2D1520DD003C54AF = {
 			isa = PBXGroup;
 			children = (
@@ -577,6 +574,7 @@
 				A8E56BD52D1520DD003C54AF /* Products */,
 				26583F0148474A7C756E32C9 /* Pods */,
 				D901B78947260557CA1FA83C /* Frameworks */,
+				A8C4C0A92D242204003C46FC /* Bridging-Header.h */,
 			);
 			sourceTree = "<group>";
 		};
@@ -607,6 +605,7 @@
 		A8F778B52D1BE98D00BF55D5 /* TSLiveWallpaperBrowseVC */ = {
 			isa = PBXGroup;
 			children = (
+				A8F778B62D1BE9A100BF55D5 /* TSLiveWallpaperBrowseVC.swift */,
 				A839463D2D1D6F9400ABFF0D /* EasyVC */,
 			);
 			path = TSLiveWallpaperBrowseVC;
@@ -682,11 +681,12 @@
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				A8C4C0A22D24218A003C46FC /* origin.mp4 in Resources */,
+				A8C4C0A32D24218A003C46FC /* 1.mov in Resources */,
+				A8C4C0A42D24218A003C46FC /* metadata.mov in Resources */,
 				A8E56BF92D1520EC003C54AF /* Assets.xcassets in Resources */,
-				A8C4C02D2D23D261003C46FC /* metadata.mov in Resources */,
 				A81F5B522D19685900740085 /* response.json in Resources */,
 				A8E56BFB2D1520EC003C54AF /* LaunchScreen.storyboard in Resources */,
-				A8C4C02F2D23DCCC003C46FC /* 1.mov in Resources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
@@ -748,12 +748,12 @@
 				A81CA4832D157F5C00A3AAC8 /* UIImageView+Ex.swift in Sources */,
 				A81F5B322D18FA2E00740085 /* Component.swift in Sources */,
 				A81F5B332D18FA2E00740085 /* CommonSectionComponent.swift in Sources */,
-				A858EE172D1CF49B004B680F /* LivePhoto.swift in Sources */,
 				A81F5B492D1956EA00740085 /* UIScreen.swift in Sources */,
 				A81F5B342D18FA2E00740085 /* CollectionViewComponent.swift in Sources */,
 				A81CA4722D1575B900A3AAC8 /* TSBaseNavigationBarView.swift in Sources */,
 				A81F5B5B2D1A5F2300740085 /* TSHomeTopBannerCell.swift in Sources */,
 				A83946312D1D66A000ABFF0D /* TSTermsServiceVC.swift in Sources */,
+				A8C4C0982D242154003C46FC /* LivePhoto.swift in Sources */,
 				A81F5B472D19562800740085 /* EditorVideoControlMaskView.swift in Sources */,
 				A81F5B392D19037800740085 /* TSBaseModel.swift in Sources */,
 				A81CA4A62D16915F00A3AAC8 /* Dictionary+Ex.swift in Sources */,
@@ -790,11 +790,6 @@
 				A84C239C2D1E3A4300B61B55 /* GPVideoClipperView.swift in Sources */,
 				A84C239D2D1E3A4300B61B55 /* GPVideoConfigMaker.swift in Sources */,
 				A839463C2D1D6E3600ABFF0D /* TSRandomWallpaperCopyrightVC.swift in Sources */,
-				A8C4C0272D23CD88003C46FC /* AVAssetExtension.swift in Sources */,
-				A8C4C0282D23CD88003C46FC /* LivePhotoCreater.swift in Sources */,
-				A8C4C0292D23CD88003C46FC /* VideoRecorder.swift in Sources */,
-				A8C4C02A2D23CD88003C46FC /* Converter4Video.swift in Sources */,
-				A8C4C02B2D23CD88003C46FC /* Converter4Image.swift in Sources */,
 				A81CA4AE2D16944B00A3AAC8 /* TSBaseCollectionCell.swift in Sources */,
 				A81CA4772D15779E00A3AAC8 /* UIColor+Ex.swift in Sources */,
 				A81CA4952D1652B500A3AAC8 /* TSEditLiveVC.swift in Sources */,
@@ -817,7 +812,12 @@
 				A8F778B72D1BE9A500BF55D5 /* TSLiveWallpaperBrowseVC.swift in Sources */,
 				A81CA48F2D15857B00A3AAC8 /* TSTabBarController.swift in Sources */,
 				A81CA4B02D1694C900A3AAC8 /* Date+Ex.swift in Sources */,
+				A8C4C0A52D24218A003C46FC /* Converter4Video.swift in Sources */,
+				A8C4C0A62D24218A003C46FC /* AVAssetExtension.swift in Sources */,
+				A8C4C0A72D24218A003C46FC /* LivePhotoUtil.m in Sources */,
+				A8C4C0A82D24218A003C46FC /* Converter4Image.swift in Sources */,
 				A83946432D1D701500ABFF0D /* TSLiveWallpaperCopyrightVC.swift in Sources */,
+				A8C4C0AB2D2427E7003C46FC /* LivePhotoConverter.swift in Sources */,
 				A81F5B3C2D19087100740085 /* TSRandomWallpaperCell.swift in Sources */,
 				A81F5B442D19559C00740085 /* EditorVideoControlViewCell.swift in Sources */,
 				A81F5B452D19559C00740085 /* EditorVideoControlView.swift in Sources */,
@@ -869,6 +869,7 @@
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				PROVISIONING_PROFILE_SPECIFIER = "";
 				SWIFT_EMIT_LOC_STRINGS = YES;
+				SWIFT_OBJC_BRIDGING_HEADER = "Bridging-Header.h";
 				SWIFT_VERSION = 5.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};
@@ -903,6 +904,7 @@
 				PRODUCT_NAME = "$(TARGET_NAME)";
 				PROVISIONING_PROFILE_SPECIFIER = "";
 				SWIFT_EMIT_LOC_STRINGS = YES;
+				SWIFT_OBJC_BRIDGING_HEADER = "Bridging-Header.h";
 				SWIFT_VERSION = 5.0;
 				TARGETED_DEVICE_FAMILY = "1,2";
 			};

+ 2 - 2
TSLiveWallpaper/Business/TSEditLiveVC/TSEditLiveEidtCell.swift

@@ -7,7 +7,7 @@
 
 
 class TSEditLiveSectionModel: CollectionViewSectionComponent{
-    var style:ImageDataStyple = .homeLiveList
+    var style:ImageDataStyple = .homeLiveBanner
     var items:[TSEditLiveItemModel] = [TSEditLiveItemModel]()
 
     var sectionInset: UIEdgeInsets {
@@ -31,7 +31,7 @@ class TSEditLiveSectionModel: CollectionViewSectionComponent{
 
 }
 class TSEditLiveItemModel: CollectionViewCellComponent {
-    var style:ImageDataStyple = .homeLiveList
+    var style:ImageDataStyple = .homeLiveBanner
     
     var imageUrl:String = ""
     var videoUrl:String = ""

+ 234 - 137
TSLiveWallpaper/Business/TSEditLiveVC/TSEditLiveVC.swift

@@ -9,6 +9,19 @@ import PhotosUI
 
 class TSEditLiveVC: TSBaseVC, UINavigationControllerDelegate {
     
+    lazy var editLiveSectionModel: TSEditLiveSectionModel = {
+        let section = TSEditLiveSectionModel()
+        section.items = [TSEditLiveItemModel()]
+        return section
+    }()
+
+    lazy var editLiveHistorySectionModel: TSImageDataSectionModel = {
+        return kImageDataCenterShared.editLiveHistoryListArray.first!
+    }()
+    
+    var dataArray:[Component] = [Component]()
+
+    
     lazy var navBarView: TSBaseNavContentBarView = {
         let navBarView = TSBaseNavContentBarView()
         let titleImageView = UIImageView.createImageView(imageName: "nav_title_editlive",contentMode: .scaleToFill)
@@ -23,8 +36,42 @@ class TSEditLiveVC: TSBaseVC, UINavigationControllerDelegate {
         return navBarView
     }()
 
-    var editLiveHistoryListArray:[TSImageDataSectionModel] = kImageDataCenterShared.editLiveHistoryListArray
     
+    lazy var collectionComponent: CollectionViewComponent = {
+        let layout = UICollectionViewFlowLayout()
+        let cp = CollectionViewComponent(frame: CGRect.zero, layout: layout, attributes: [ :])
+        cp.collectionView.contentInset = UIEdgeInsets(top: 0, left: 0, bottom: k_Height_TabBar, right: 0)
+        
+        cp.itemActionHandler = { [weak self] cellCp, indexPath in
+            guard let self = self else { return }
+            self.openVideoPicker()
+        }
+        
+        cp.itemDidSelectedHandler = { [weak self] (object, indexPath) in
+            guard let self = self else { return }
+            let obj = dataArray.safeObj(At: indexPath.section)
+            if let liveModel = obj as? TSImageDataSectionModel {
+                if liveModel.style == .homeLiveList{
+                    let vc = TSLiveWallpaperBrowseVC(itemModels: liveModel.items,currentIndex: indexPath.row)
+                    vc.isCanDelete = true
+                    vc.deleteCompletion = {[weak self] item in
+                        guard let self = self else { return }
+                        if let itemModel = editLiveHistorySectionModel.items.safeObj(At: item) {
+                            editLiveHistorySectionModel.items.remove(at: item)
+                            TSFileManagerTool.removeItem(from: itemModel.imageUrl.fillCacheURL)
+                            TSFileManagerTool.removeItem(from: itemModel.videoUrl.fillCacheURL)
+                            kImageDataCenterShared.editLiveHistoryListArray = [editLiveHistorySectionModel]
+                            reloadView()
+                        }
+                    }
+                    kPresentModalVC(target: self, modelVC: vc)
+                }
+            }
+        }
+
+        return cp
+    }()
+
     override func createView() {
         
         setViewBgImageNamed(named: "view_main_bg")
@@ -34,162 +81,72 @@ class TSEditLiveVC: TSBaseVC, UINavigationControllerDelegate {
             make.edges.equalToSuperview()
         }
         
-        let videoUpImageView = UIImageView.createImageView(imageName: "video_up")
-        contentView.addSubview(videoUpImageView)
-        videoUpImageView.snp.makeConstraints { make in
-            make.top.equalTo(130)
-            make.centerX.equalToSuperview()
-            make.width.height.equalTo(153)
-        }
-        
-        let videoText = UILabel.createLabel(text: "Upload Video".localized,font: UIFont.systemFont(ofSize: 16, weight: .medium),textColor: .white,textAlignment: .center)
-        contentView.addSubview(videoText)
-        videoText.snp.makeConstraints { make in
-            make.leading.equalTo(16)
-            make.trailing.equalTo(-16)
-            make.top.equalTo(videoUpImageView.snp.bottom).offset(12)
-            make.height.equalTo(16)
-        }
-        
-        let diyBtn: UIButton = TSViewTool.createNormalSubmitBtn(title: "DIY Live Wallpaper".localized, imageNamed: "edit_black") { [weak self]  in
-            guard let self = self else { return }
-            self.pick(.any(of: [.videos]))
+        contentView.addSubview(collectionComponent.collectionView)
+        collectionComponent.collectionView.snp.makeConstraints { make in
+            make.edges.equalToSuperview()
         }
         
-        diyBtn.cornerRadius = 30
-        contentView.addSubview(diyBtn)
-        diyBtn.snp.makeConstraints { make in
-            make.leading.equalTo(16)
-            make.trailing.equalTo(-16)
-            make.top.equalTo(videoText.snp.bottom).offset(53)
-            make.height.equalTo(60)
-        }
+        reloadView()
     }
     
-    
-    func saveLive(videoPath:URL){
-        
-        TSToastShared.showLoading()
-        
-        LivePhoto.generate(from: nil, videoURL: videoPath) { progress in
-            
-        } completion: {[weak self] (livePhoto, resources) in
-            guard let self = self else { return }
-            
-            if let resources = resources {
-                LivePhoto.saveToLibrary(resources, completion: { (success) in
-                    kExecuteOnMainThread {
-                        TSToastShared.hideLoading()
-                        if success {
-                            debugPrint("Live Photo Saved,The live photo was successfully saved to Photos.")
-                            kSavePhotoSuccesswShared.show(atView: self.view)
-                        }else {
-                            debugPrint("Live Photo Not Saved,The live photo was not saved to Photos.")
-                        }
-                        
-                        TSFileManagerTool.removeItem(from: resources.pairedImage)
-                        TSFileManagerTool.removeItem(from: resources.pairedVideo)
-                    }
-                })
-            }
+    func reloadView(){
+        collectionComponent.clear()
+        if editLiveHistorySectionModel.items.count  > 0 {
+            dataArray = [editLiveSectionModel,editLiveHistorySectionModel]
+        }else{
+            dataArray = [editLiveSectionModel]
         }
+        collectionComponent.reloadView(with:dataArray)
     }
 }
 
-
-// MARK: - PHPickerViewControllerDelegate
-
-extension TSEditLiveVC: PHPickerViewControllerDelegate {
-    
-    /// Present `PHPickerViewController`
-    func pick(_ filter: PHPickerFilter) {
-        var config = PHPickerConfiguration()
-        config.filter = filter
-        config.selectionLimit = 1
-        config.preferredAssetRepresentationMode = .current
-        let picker = PHPickerViewController(configuration: config)
+import UniformTypeIdentifiers
+extension TSEditLiveVC: UIImagePickerControllerDelegate {
+    func openVideoPicker() {
+        TSToastShared.showLoading(in: self.view)
+        let picker = UIImagePickerController()
+        picker.sourceType = .photoLibrary
+        picker.mediaTypes =  [UTType.movie.identifier] // 仅允许选择视频
+        picker.allowsEditing = true // 启用编辑功能
         picker.delegate = self
-        picker.modalPresentationStyle = .overFullScreen
+        picker.videoMaximumDuration = 3.0
         present(picker, animated: true, completion: nil)
     }
-    
-    func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
-        defer { picker.dismiss(animated: true) }
-        assemblePicker(picker, didFinishPicking: results)
-    }
-
-    func assemblePicker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
-        guard let itemProvider = results.first?.itemProvider else {
-            return
-        }
+    // 用户完成选择
+    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
         
-        if itemProvider.hasItemConformingToTypeIdentifier(UTType.movie.identifier) {
-            itemProvider.loadFileRepresentation(forTypeIdentifier: itemProvider.registeredTypeIdentifiers.first!) { [weak self] url, error in
-                guard let self, let url = url else {
-                    return
-                }
-                do {
-                    let cachesDirectory = try self.cachesDirectory()
-                    let targetURL = cachesDirectory.appendingPathComponent("assemblePickerVideo").appendingPathExtension(url.pathExtension)
-                    let fileManager = FileManager.default
-                    // 如果目标路径存在同名文件,先删除旧文件
-                    if fileManager.fileExists(atPath: targetURL.path) {
-                        try fileManager.removeItem(at: targetURL)
-                    }
-                    
-                    try fileManager.copyItem(at: url, to: targetURL)
-                    
-                    kExecuteOnMainThread {
-                        self.openVideoClipperVC(videoURL: targetURL)
-                    }
+        TSToastShared.hideLoading()
+        if let editedURL = info[.mediaURL] as? URL {
+            debugPrint("Selected video: \(editedURL)")
+            // 在这里处理选中的视频(例如上传或保存)
 
-                } catch {
-                    TSToastShared.showToast(message: "An error occurred")
-                }
-            }
+            let cachesDirectory = TSFileManagerTool.editLiveVideoPathURL
+            let targetURL = cachesDirectory.appendingPathComponent("assemblePickerVideo").appendingPathExtension(editedURL.pathExtension)
+            TSFileManagerTool.copyFileWithOverwrite(from: editedURL, to: targetURL)
+            
+            
+//            LivePhotoUtil.convertVideo(targetURL.path) { success, msg in
+//                debugPrint(msg)
+//            }
+
+//            LivePhotoConverter.convertVideo(targetURL) { success, image, video, msg in
+//                debugPrint(msg)
+//            }
+            
+            saveLive(videoPath: targetURL)
         }
+        picker.dismiss(animated: true, completion: nil)
     }
-    
-    private func cachesDirectory() throws -> URL {
-        let cachesDirectoryURL = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
-        let cachesDirectory = cachesDirectoryURL.appendingPathComponent("asemble", isDirectory: true)
-        if !FileManager.default.fileExists(atPath: cachesDirectory.absoluteString) {
-            try FileManager.default.createDirectory(at: cachesDirectory, withIntermediateDirectories: true, attributes: nil)
-        }
-        return cachesDirectory
+
+    // 用户取消选择
+    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
+        TSToastShared.hideLoading()
+        picker.dismiss(animated: true, completion: nil)
     }
 }
 
-//import UniformTypeIdentifiers
-//extension TSEditLiveVC: UIImagePickerControllerDelegate {
-//    func openVideoPicker() {
-//        let picker = UIImagePickerController()
-//        picker.sourceType = .photoLibrary
-//        picker.mediaTypes =  [UTType.movie.identifier] // 仅允许选择视频
-//        picker.allowsEditing = true // 启用编辑功能
-//        picker.delegate = self
-//        picker.videoMaximumDuration = 3.0
-//        present(picker, animated: true, completion: nil)
-//    }
-//    // 用户完成选择
-//    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey: Any]) {
-//        if let editedURL = info[.mediaURL] as? URL {
-//            debugPrint("Selected video: \(editedURL)")
-//            // 在这里处理选中的视频(例如上传或保存)
-//            saveLive(videoPath: editedURL)
-//        }
-//        picker.dismiss(animated: true, completion: nil)
-//    }
-//
-//    // 用户取消选择
-//    func imagePickerControllerDidCancel(_ picker: UIImagePickerController) {
-//        picker.dismiss(animated: true, completion: nil)
-//    }
-//}
-
 extension TSEditLiveVC{
     
-    
     func openVideoClipperVC(videoURL:URL) {
             let clipperController = GPVideoClipperController.clipperWithVideoURL(videoURL, makerBlock: { (maker) in
                 maker.startTime = 0
@@ -209,6 +166,146 @@ extension TSEditLiveVC{
         
         kPresentModalVC(target: self, modelVC: clipperController)
     }
+    
+    
+    func saveLive(videoPath:URL){
+        TSToastShared.showLoading()
+        LivePhotoConverter.convertVideo(videoPath) { success, imageURL, videoURL, errorMsg in
+            if success {
+                debugPrint("Live Photo Saved,The live photo was successfully saved to Photos.")
+                
+                if let imageURL = imageURL,let videoURL = videoURL {
+                    LivePhotoConverter.saveToLibrary(videoURL: videoURL, imageURL: imageURL) { success in
+                        kSavePhotoSuccesswShared.show(atView: self.view)
+                    }
+                    
+                    
+                    let saveURL = TSFileManagerTool.saveLiveVideoPathURL
+                    let timestampString = Date.timestampString
+                    let saveImageURL = saveURL.appendingPathComponent("image\(timestampString).heic")
+                    let saveVideoURL = saveURL.appendingPathComponent("video\(timestampString).mov")
+                    TSFileManagerTool.copyFileWithOverwrite(from: imageURL, to: saveImageURL)
+                    TSFileManagerTool.copyFileWithOverwrite(from: videoURL, to: saveVideoURL)
+                    
+                    
+                    let itemModel = TSImageDataItemModel()
+                    itemModel.imageUrl = TSFileManagerTool.getCacheSubPath(at: saveImageURL)!
+                    itemModel.videoUrl = TSFileManagerTool.getCacheSubPath(at: saveVideoURL)!
+                    self.editLiveHistorySectionModel.items.append(itemModel)
+                    kImageDataCenterShared.editLiveHistoryListArray = [self.editLiveHistorySectionModel]
+                    self.reloadView()
+                }
+            }else {
+                debugPrint("Live Photo Not Saved,The live photo was not saved to Photos.")
+            }
+        }
+
+//        LivePhoto.generate(from: nil, videoURL: videoPath) { progress in
+//
+//        } completion: {[weak self] (livePhoto, resources) in
+//            guard let self = self else { return }
+//
+//            if let resources = resources {
+//                LivePhoto.saveToLibrary(resources, completion: { (success) in
+//                    kExecuteOnMainThread {
+//                        TSToastShared.hideLoading()
+//                        if success {
+//                            debugPrint("Live Photo Saved,The live photo was successfully saved to Photos.")
+//                            kSavePhotoSuccesswShared.show(atView: self.view)
+//                        }else {
+//                            debugPrint("Live Photo Not Saved,The live photo was not saved to Photos.")
+//                        }
+//
+//                        TSFileManagerTool.removeItem(from: resources.pairedImage)
+//                        TSFileManagerTool.removeItem(from: resources.pairedVideo)
+//                    }
+//                })
+//            }
+//        }
+    }
 }
 
 
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+// MARK: - PHPickerViewControllerDelegate
+//extension TSEditLiveVC: PHPickerViewControllerDelegate {
+//
+//    /// Present `PHPickerViewController`
+//    func pick(_ filter: PHPickerFilter) {
+//        var config = PHPickerConfiguration()
+//        config.filter = filter
+//        config.selectionLimit = 1
+//        config.preferredAssetRepresentationMode = .current
+//        let picker = PHPickerViewController(configuration: config)
+//        picker.delegate = self
+//        picker.modalPresentationStyle = .overFullScreen
+//        present(picker, animated: true, completion: nil)
+//    }
+//
+//    func picker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
+//        defer { picker.dismiss(animated: true) }
+//        assemblePicker(picker, didFinishPicking: results)
+//    }
+//
+//    func assemblePicker(_ picker: PHPickerViewController, didFinishPicking results: [PHPickerResult]) {
+//        guard let itemProvider = results.first?.itemProvider else {
+//            return
+//        }
+//
+//        if itemProvider.hasItemConformingToTypeIdentifier(UTType.movie.identifier) {
+//            itemProvider.loadFileRepresentation(forTypeIdentifier: itemProvider.registeredTypeIdentifiers.first!) { [weak self] url, error in
+//                guard let self, let url = url else {
+//                    return
+//                }
+//                do {
+//                    let cachesDirectory = try self.cachesDirectory()
+//                    let targetURL = cachesDirectory.appendingPathComponent("assemblePickerVideo").appendingPathExtension(url.pathExtension)
+//                    let fileManager = FileManager.default
+//                    // 如果目标路径存在同名文件,先删除旧文件
+//                    if fileManager.fileExists(atPath: targetURL.path) {
+//                        try fileManager.removeItem(at: targetURL)
+//                    }
+//
+//                    try fileManager.copyItem(at: url, to: targetURL)
+//
+//                    kExecuteOnMainThread {
+//                        self.openVideoClipperVC(videoURL: targetURL)
+//                    }
+//
+//                } catch {
+//                    TSToastShared.showToast(message: "An error occurred")
+//                }
+//            }
+//        }
+//    }
+//
+//    private func cachesDirectory() throws -> URL {
+//        let cachesDirectoryURL = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: false)
+//        let cachesDirectory = cachesDirectoryURL.appendingPathComponent("asemble", isDirectory: true)
+//        if !FileManager.default.fileExists(atPath: cachesDirectory.absoluteString) {
+//            try FileManager.default.createDirectory(at: cachesDirectory, withIntermediateDirectories: true, attributes: nil)
+//        }
+//        return cachesDirectory
+//    }
+//}
+
+
+

+ 71 - 17
TSLiveWallpaper/Business/TSHomeVC/TSLiveWallpaperBrowseVC/EasyVC/TSLiveWallpaperBrowseVC.swift → TSLiveWallpaper/Business/TSHomeVC/TSLiveWallpaperBrowseVC/TSLiveWallpaperBrowseVC.swift

@@ -30,11 +30,10 @@ class TSLiveWallpaperBrowseVC: TSBaseVC {
             self.btnsAllView.isHidden = isPreview
         }
     }
-//    var itemModels:[TSImageDataItemModel]!
+    
     private var dataModelArray = [TSLiveWallpaperBrowseItemModel]()
     var currentIndex:Int
     init(itemModels: [TSImageDataItemModel],currentIndex:Int) {
-//        self.itemModels = itemModels
         self.currentIndex = currentIndex
         for itemModel in itemModels {
             let model = TSLiveWallpaperBrowseItemModel()
@@ -454,23 +453,63 @@ class TSLiveWallpaperBrowseCell : TSBaseCollectionCell,PHLivePhotoViewDelegate{
                         return
                     }
                     
-                    livePhotoTool.generate(from: imageCacheUrl, videoURL: videoCacheUrl, progress: { (percent) in
-                        debugPrint(percent)
-                    }) { [weak self] (livePhoto, resources) in
-                        guard let self = self else { return }
-                        
-                        loading.stopAnimating()
-                        itemModel?.livePhoto = livePhoto
-                        itemModel?.livePhotoResources = resources
+                    if videoCacheUrl.path.contains("/saveVideo/") {
+                        self.loading.stopAnimating()
                         
-                        if let livePhoto = livePhoto {
-                            self.livePhotoView.livePhoto = livePhoto
-                            self.livePhotoView.isHidden = false
-                            self.livePhotoView.startPlayback(with: .full)
+                        LivePhotoConverter.livePhotoRequest(videoURL: videoCacheUrl, imageURL: imageCacheUrl) { livePhoto in
+                            self.itemModel?.livePhoto = livePhoto
+                            self.itemModel?.livePhotoResources = (imageCacheUrl,videoCacheUrl)
+                            
+                            if let livePhoto = livePhoto {
+                                self.livePhotoView.livePhoto = livePhoto
+                                self.livePhotoView.isHidden = false
+                                self.livePhotoView.startPlayback(with: .full)
+                            }else{
+                                debugPrint("livePhoto.generate fail")
+                            }
+                        }
+                        return
+                    }
+//                    LivePhotoConverter.convertVideo(videoCacheUrl, imageURL: imageCacheUrl) { success, photoURL, videoURL, errorMsg in
+                    LivePhotoConverter.convertVideo(videoCacheUrl) { success, photoURL, videoURL, errorMsg in
+                        self.loading.stopAnimating()
+                        if success {
+                            LivePhotoConverter.livePhotoRequest(videoURL: videoURL!, imageURL: photoURL!) { livePhoto in
+                                self.itemModel?.livePhoto = livePhoto
+                                self.itemModel?.livePhotoResources = (photoURL!,videoURL!)
+                                
+                                if let livePhoto = livePhoto {
+                                    self.livePhotoView.livePhoto = livePhoto
+                                    self.livePhotoView.isHidden = false
+                                    self.livePhotoView.startPlayback(with: .full)
+                                }else{
+                                    debugPrint("livePhoto.generate fail")
+                                }
+                            }
                         }else{
-                            debugPrint("livePhoto.generate fail")
+                            debugPrint(errorMsg)
                         }
                     }
+                    
+                    
+//                    livePhotoTool.generate(from: imageCacheUrl, videoURL: videoCacheUrl, progress: { (percent) in
+//                        debugPrint(percent)
+//                    }) { [weak self] (livePhoto, resources) in
+//                        guard let self = self else { return }
+//                        
+//                        loading.stopAnimating()
+//                        itemModel?.livePhoto = livePhoto
+//                        itemModel?.livePhotoResources = resources
+//                        
+//                        if let livePhoto = livePhoto {
+//                            self.livePhotoView.livePhoto = livePhoto
+//                            self.livePhotoView.isHidden = false
+//                            self.livePhotoView.startPlayback(with: .full)
+//                        }else{
+//                            debugPrint("livePhoto.generate fail")
+//                        }
+//                    }
+                    
                 }
             }
         }
@@ -500,8 +539,22 @@ class TSLiveWallpaperBrowseCell : TSBaseCollectionCell,PHLivePhotoViewDelegate{
     }
     
     func saveLivePhoto(completion: @escaping (Bool) -> Void){
+//        if let resources = itemModel?.livePhotoResources {
+//            LivePhoto.saveToLibrary(resources, completion: { (success) in
+//                kExecuteOnMainThread {
+//                    if success {
+//                        debugPrint("Live Photo Saved,The live photo was successfully saved to Photos.")
+//                        completion(true)
+//                    }else {
+//                        debugPrint("Live Photo Not Saved,The live photo was not saved to Photos.")
+//                        completion(false)
+//                    }
+//                }
+//            })
+//        }
+        
         if let resources = itemModel?.livePhotoResources {
-            LivePhoto.saveToLibrary(resources, completion: { (success) in
+            LivePhotoConverter.saveToLibrary(videoURL: resources.pairedVideo, imageURL: resources.pairedImage) { success in
                 kExecuteOnMainThread {
                     if success {
                         debugPrint("Live Photo Saved,The live photo was successfully saved to Photos.")
@@ -511,8 +564,9 @@ class TSLiveWallpaperBrowseCell : TSBaseCollectionCell,PHLivePhotoViewDelegate{
                         completion(false)
                     }
                 }
-            })
+            }
         }
+
     }
     
     func stopPlayLive() {

+ 133 - 133
TSLiveWallpaper/Common/ThirdParty/LivePhoto.swift

@@ -368,136 +368,136 @@ class LivePhoto {
     
 }
 
-fileprivate extension AVAsset {
-    func countFrames(exact:Bool) -> Int {
-        
-        var frameCount = 0
-        
-        if let videoReader = try? AVAssetReader(asset: self)  {
-            
-            if let videoTrack = self.tracks(withMediaType: .video).first {
-                
-                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
-                
-                
-                if exact {
-                    
-                    frameCount = 0
-                    
-                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
-                    videoReader.add(videoReaderOutput)
-                    
-                    videoReader.startReading()
-                    
-                    // count frames
-                    while true {
-                        let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
-                        if sampleBuffer == nil {
-                            break
-                        }
-                        frameCount += 1
-                    }
-                    
-                    videoReader.cancelReading()
-                }
-                
-                
-            }
-        }
-        
-        return frameCount
-    }
-    
-    func stillImageTime() -> CMTime?  {
-        
-        var stillTime:CMTime? = nil
-        
-        if let videoReader = try? AVAssetReader(asset: self)  {
-            
-            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
-                
-                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
-                
-                videoReader.add(videoReaderOutput)
-                
-                videoReader.startReading()
-                
-                let keyStillImageTime = "com.apple.quicktime.still-image-time"
-                let keySpaceQuickTimeMetadata = "mdta"
-                
-                var found = false
-                
-                while found == false {
-                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
-                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
-                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
-                            for item in group?.items ?? [] {
-                                if item.key as? String == keyStillImageTime && item.keySpace!.rawValue == keySpaceQuickTimeMetadata {
-                                    stillTime = group?.timeRange.start
-                                    //print("stillImageTime = \(CMTimeGetSeconds(stillTime!))")
-                                    found = true
-                                    break
-                                }
-                            }
-                        }
-                    }
-                    else {
-                        break;
-                    }
-                }
-                
-                videoReader.cancelReading()
-                
-            }
-        }
-        
-        return stillTime
-    }
-    
-    func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
-        
-        var time = self.duration
-        
-        var frameCount = inFrameCount
-        
-        if frameCount == 0 {
-            frameCount = self.countFrames(exact: true)
-        }
-        
-        let frameDuration = Int64(Float(time.value) / Float(frameCount))
-        
-        time.value = Int64(Float(time.value) * percent)
-        
-        //print("stillImageTime = \(CMTimeGetSeconds(time))")
-        
-        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
-    }
-    
-    func getAssetFrame(percent:Float) -> UIImage?
-    {
-        
-        let imageGenerator = AVAssetImageGenerator(asset: self)
-        imageGenerator.appliesPreferredTrackTransform = true
-        
-        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1,timescale: 100)
-        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1,timescale: 100)
-        
-        var time = self.duration
-        
-        time.value = Int64(Float(time.value) * percent)
-        
-        do {
-            var actualTime = CMTime.zero
-            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
-            
-            let img = UIImage(cgImage: imageRef)
-            
-            return img
-        }
-        catch let error as NSError
-        {
-            print("Image generation failed with error \(error)")
-            return nil
-        }
-    }
-}
+//fileprivate extension AVAsset {
+//    func countFrames(exact:Bool) -> Int {
+//        
+//        var frameCount = 0
+//        
+//        if let videoReader = try? AVAssetReader(asset: self)  {
+//            
+//            if let videoTrack = self.tracks(withMediaType: .video).first {
+//                
+//                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
+//                
+//                
+//                if exact {
+//                    
+//                    frameCount = 0
+//                    
+//                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
+//                    videoReader.add(videoReaderOutput)
+//                    
+//                    videoReader.startReading()
+//                    
+//                    // count frames
+//                    while true {
+//                        let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
+//                        if sampleBuffer == nil {
+//                            break
+//                        }
+//                        frameCount += 1
+//                    }
+//                    
+//                    videoReader.cancelReading()
+//                }
+//                
+//                
+//            }
+//        }
+//        
+//        return frameCount
+//    }
+//    
+//    func stillImageTime() -> CMTime?  {
+//        
+//        var stillTime:CMTime? = nil
+//        
+//        if let videoReader = try? AVAssetReader(asset: self)  {
+//            
+//            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
+//                
+//                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
+//                
+//                videoReader.add(videoReaderOutput)
+//                
+//                videoReader.startReading()
+//                
+//                let keyStillImageTime = "com.apple.quicktime.still-image-time"
+//                let keySpaceQuickTimeMetadata = "mdta"
+//                
+//                var found = false
+//                
+//                while found == false {
+//                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
+//                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
+//                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
+//                            for item in group?.items ?? [] {
+//                                if item.key as? String == keyStillImageTime && item.keySpace!.rawValue == keySpaceQuickTimeMetadata {
+//                                    stillTime = group?.timeRange.start
+//                                    //print("stillImageTime = \(CMTimeGetSeconds(stillTime!))")
+//                                    found = true
+//                                    break
+//                                }
+//                            }
+//                        }
+//                    }
+//                    else {
+//                        break;
+//                    }
+//                }
+//                
+//                videoReader.cancelReading()
+//                
+//            }
+//        }
+//        
+//        return stillTime
+//    }
+//    
+//    func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
+//        
+//        var time = self.duration
+//        
+//        var frameCount = inFrameCount
+//        
+//        if frameCount == 0 {
+//            frameCount = self.countFrames(exact: true)
+//        }
+//        
+//        let frameDuration = Int64(Float(time.value) / Float(frameCount))
+//        
+//        time.value = Int64(Float(time.value) * percent)
+//        
+//        //print("stillImageTime = \(CMTimeGetSeconds(time))")
+//        
+//        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
+//    }
+//    
+//    func getAssetFrame(percent:Float) -> UIImage?
+//    {
+//        
+//        let imageGenerator = AVAssetImageGenerator(asset: self)
+//        imageGenerator.appliesPreferredTrackTransform = true
+//        
+//        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1,timescale: 100)
+//        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1,timescale: 100)
+//        
+//        var time = self.duration
+//        
+//        time.value = Int64(Float(time.value) * percent)
+//        
+//        do {
+//            var actualTime = CMTime.zero
+//            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
+//            
+//            let img = UIImage(cgImage: imageRef)
+//            
+//            return img
+//        }
+//        catch let error as NSError
+//        {
+//            print("Image generation failed with error \(error)")
+//            return nil
+//        }
+//    }
+//}

BIN
TSLiveWallpaper/Common/ThirdParty/Util/1.mov


+ 136 - 0
TSLiveWallpaper/Common/ThirdParty/Util/AVAssetExtension.swift

@@ -0,0 +1,136 @@
+import AVKit
+
+extension AVAsset {
+    func countFrames(exact:Bool) -> Int {
+        
+        var frameCount = 0
+        
+        if let videoReader = try? AVAssetReader(asset: self)  {
+            
+            if let videoTrack = self.tracks(withMediaType: .video).first {
+                
+                frameCount = Int(CMTimeGetSeconds(self.duration) * Float64(videoTrack.nominalFrameRate))
+                
+                
+                if exact {
+                    
+                    frameCount = 0
+                    
+                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: nil)
+                    videoReader.add(videoReaderOutput)
+                    
+                    videoReader.startReading()
+                    
+                    // count frames
+                    while true {
+                        let sampleBuffer = videoReaderOutput.copyNextSampleBuffer()
+                        if sampleBuffer == nil {
+                            break
+                        }
+                        frameCount += 1
+                    }
+                    
+                    videoReader.cancelReading()
+                }
+                
+                
+            }
+        }
+        
+        return frameCount
+    }
+    
+    func stillImageTime() -> CMTime?  {
+        
+        var stillTime:CMTime? = nil
+        
+        if let videoReader = try? AVAssetReader(asset: self)  {
+            
+            if let metadataTrack = self.tracks(withMediaType: .metadata).first {
+                
+                let videoReaderOutput = AVAssetReaderTrackOutput(track: metadataTrack, outputSettings: nil)
+                
+                videoReader.add(videoReaderOutput)
+                
+                videoReader.startReading()
+                
+                let keyStillImageTime = "com.apple.quicktime.still-image-time"
+                let keySpaceQuickTimeMetadata = "mdta"
+                
+                var found = false
+                
+                while found == false {
+                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
+                        if CMSampleBufferGetNumSamples(sampleBuffer) != 0 {
+                            let group = AVTimedMetadataGroup(sampleBuffer: sampleBuffer)
+                            for item in group?.items ?? [] {
+                                if item.key as? String == keyStillImageTime && item.keySpace!.rawValue == keySpaceQuickTimeMetadata {
+                                    stillTime = group?.timeRange.start
+                                    //print("stillImageTime = \(CMTimeGetSeconds(stillTime!))")
+                                    found = true
+                                    break
+                                }
+                            }
+                        }
+                    }
+                    else {
+                        break;
+                    }
+                }
+                
+                videoReader.cancelReading()
+                
+            }
+        }
+        
+        return stillTime
+    }
+    
+    func makeStillImageTimeRange(percent:Float, inFrameCount:Int = 0) -> CMTimeRange {
+        
+        var time = self.duration
+        
+        var frameCount = inFrameCount
+        
+        if frameCount == 0 {
+            frameCount = self.countFrames(exact: true)
+        }
+        
+        let frameDuration = Int64(Float(time.value) / Float(frameCount))
+        
+        time.value = Int64(Float(time.value) * percent)
+        
+        //print("stillImageTime = \(CMTimeGetSeconds(time))")
+        
+        return CMTimeRangeMake(start: time, duration: CMTimeMake(value: frameDuration, timescale: time.timescale))
+    }
+    
+    func getAssetFrame(percent:Float) -> UIImage?
+    {
+        
+        let imageGenerator = AVAssetImageGenerator(asset: self)
+        imageGenerator.appliesPreferredTrackTransform = true
+        
+        imageGenerator.requestedTimeToleranceAfter = CMTimeMake(value: 1,timescale: 100)
+        imageGenerator.requestedTimeToleranceBefore = CMTimeMake(value: 1,timescale: 100)
+        
+        var time = self.duration
+        
+        time.value = Int64(Float(time.value) * percent)
+        
+        do {
+            var actualTime = CMTime.zero
+            let imageRef = try imageGenerator.copyCGImage(at: time, actualTime:&actualTime)
+            
+            let img = UIImage(cgImage: imageRef)
+            
+            return img
+        }
+        catch let error as NSError
+        {
+            print("Image generation failed with error \(error)")
+            return nil
+        }
+    }
+}
+

+ 57 - 0
TSLiveWallpaper/Common/ThirdParty/Util/Converter4Image.swift

@@ -0,0 +1,57 @@
+import UIKit
+import UniformTypeIdentifiers
+import CoreServices
+import ImageIO
+import Photos
+
+@objc class Converter4Image : NSObject {
+    private let kFigAppleMakerNote_AssetIdentifier = "17"
+    private let image : UIImage
+
+    @objc init(image : UIImage) {
+        self.image = image
+    }
+
+    @objc func read() -> String? {
+        guard let makerNote = metadata(index: 0)?.object(forKey: kCGImagePropertyMakerAppleDictionary) as? NSDictionary else {
+            return nil
+        }
+        return makerNote.object(forKey: kFigAppleMakerNote_AssetIdentifier) as? String
+    }
+
+    @objc func write(dest : String, assetIdentifier : String) {
+        guard let destURL = URL(fileURLWithPath: dest) as CFURL?,
+              let dest = CGImageDestinationCreateWithURL(destURL, UTType.heic.identifier as CFString, 1, nil) else { return }
+        defer { CGImageDestinationFinalize(dest) }
+        for i in 0...0 {
+            guard let imageSource = self.imageSource() else { return }
+            guard let metadata = self.metadata(index: i)?.mutableCopy() as? NSMutableDictionary else { return }
+            
+            let makerNote = NSMutableDictionary()
+            makerNote.setObject(assetIdentifier, forKey: kFigAppleMakerNote_AssetIdentifier as NSCopying)
+            metadata.setObject(makerNote, forKey: kCGImagePropertyMakerAppleDictionary as NSString)
+//            metadata.setObject("sRGB IEC61966-2.1", forKey: kCGImagePropertyProfileName as NSString)
+            CGImageDestinationAddImageFromSource(dest, imageSource, i, metadata as CFDictionary)
+        }
+    }
+
+    private func metadata(index: Int) -> NSDictionary? {
+        return self.imageSource().flatMap {
+            CGImageSourceCopyPropertiesAtIndex($0, index, nil) as NSDictionary?
+        }
+    }
+
+    private func imageSource() -> CGImageSource? {
+        return self.data().flatMap {
+            CGImageSourceCreateWithData($0 as CFData, nil)
+        }
+    }
+
+    private func data() -> Data? {
+        if #available(iOS 17.0, *) {
+            return image.heicData()
+        } else {
+            return image.pngData()
+        }
+    }
+}

+ 660 - 0
TSLiveWallpaper/Common/ThirdParty/Util/Converter4Video.swift

@@ -0,0 +1,660 @@
+import Foundation
+import AVFoundation
+import UIKit
+
+@objc class Converter4Video : NSObject {
+    private let kKeyContentIdentifier =  "com.apple.quicktime.content.identifier"
+    private let kKeyStillImageTime = "com.apple.quicktime.still-image-time"
+    private let kKeySpaceQuickTimeMetadata = "mdta"
+    private let path : String
+
+    private lazy var asset : AVURLAsset = {
+        let url = NSURL(fileURLWithPath: self.path)
+        return AVURLAsset(url: url as URL)
+    }()
+
+    @objc init(path : String) {
+        self.path = path
+    }
+
+    @objc func readAssetIdentifier() -> String? {
+        for item in metadata() {
+            if item.key as? String == kKeyContentIdentifier &&
+                item.keySpace?.rawValue == kKeySpaceQuickTimeMetadata {
+                return item.value as? String
+            }
+        }
+        return nil
+    }
+    
+    private func reader(track: AVAssetTrack, settings: [String:AnyObject]?) throws -> (AVAssetReader, AVAssetReaderOutput) {
+        let output = AVAssetReaderTrackOutput(track: track, outputSettings: settings)
+        let reader = try AVAssetReader(asset: asset)
+        reader.add(output)
+        return (reader, output)
+    }
+
+//    func readStillImageTime() -> NSNumber? {
+//        if let track = track(mediaType: AVMediaType.metadata.rawValue) {
+//            let (reader, output) = try! self.reader(track: track, settings: nil)
+//            reader.startReading()
+//
+//            while true {
+//                guard let buffer = output.copyNextSampleBuffer() else { return nil }
+//                if CMSampleBufferGetNumSamples(buffer) != 0 {
+//                    let group = AVTimedMetadataGroup(sampleBuffer: buffer)
+//                    for item in group?.items ?? [] {
+//                        if item.key as? String == kKeyStillImageTime &&
+//                            item.keySpace?.rawValue == kKeySpaceQuickTimeMetadata {
+//                                return item.numberValue
+//                        }
+//                    }
+//                }
+//            }
+//        }
+//        return nil
+//    }
+    
+    private func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
+        let keyStillImageTime = "com.apple.quicktime.still-image-time"
+        let keySpaceQuickTimeMetadata = "mdta"
+        let spec : NSDictionary = [
+            kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString:
+            "\(keySpaceQuickTimeMetadata)/\(keyStillImageTime)",
+            kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString:
+            "com.apple.metadata.datatype.int8"            ]
+        var desc : CMFormatDescription? = nil
+        CMMetadataFormatDescriptionCreateWithMetadataSpecifications(allocator: kCFAllocatorDefault, metadataType: kCMMetadataFormatType_Boxed, metadataSpecifications: [spec] as CFArray, formatDescriptionOut: &desc)
+        let input = AVAssetWriterInput(mediaType: .metadata,
+                                       outputSettings: nil, sourceFormatHint: desc)
+        return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input)
+    }
+    
+    private func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
+        let item = AVMutableMetadataItem()
+        let keyContentIdentifier =  "com.apple.quicktime.content.identifier"
+        let keySpaceQuickTimeMetadata = "mdta"
+        item.key = keyContentIdentifier as (NSCopying & NSObjectProtocol)?
+        item.keySpace = AVMetadataKeySpace(rawValue: keySpaceQuickTimeMetadata)
+        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)?
+        item.dataType = "com.apple.metadata.datatype.UTF-8"
+        return item
+    }
+
+    private func metadataForStillImageTime() -> AVMetadataItem {
+        let item = AVMutableMetadataItem()
+        item.key = kKeyStillImageTime as any NSCopying & NSObjectProtocol
+        item.keySpace = AVMetadataKeySpace.quickTimeMetadata
+        item.value = 0 as (NSCopying & NSObjectProtocol)?
+        item.dataType = kCMMetadataBaseDataType_SInt8 as String
+        return item.copy() as! AVMetadataItem
+    }
+
+    @objc func write(dest: String, assetIdentifier: String, metaURL: URL, completion: @escaping (Bool, Error?) -> Void) {
+        do {
+            let metadataAsset = AVURLAsset(url: metaURL)
+            
+            let readerVideo = try AVAssetReader(asset: asset)
+            let readerMetadata = try AVAssetReader(asset: metadataAsset)
+            
+            let writer = try AVAssetWriter(outputURL: URL(fileURLWithPath: dest), fileType: .mov)
+            
+            let writingGroup = DispatchGroup()
+            
+            var videoIOs = [(AVAssetWriterInput, AVAssetReaderTrackOutput)]()
+            var metadataIOs = [(AVAssetWriterInput, AVAssetReaderTrackOutput)]()
+            
+            self.loadTracks(asset: self.asset, type: .video) { videoTracks in
+                for track in videoTracks {
+                    let trackReaderOutput = AVAssetReaderTrackOutput(track: track, outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)])
+                    readerVideo.add(trackReaderOutput)
+                    
+                    let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : track.naturalSize.width, AVVideoHeightKey : track.naturalSize.height])
+                    videoInput.transform = track.preferredTransform
+                    videoInput.expectsMediaDataInRealTime = true
+                    writer.add(videoInput)
+                    
+                    videoIOs.append((videoInput, trackReaderOutput))
+                }
+                
+                self.loadTracks(asset: metadataAsset, type: .metadata) { metadataTracks in
+                    for track in metadataTracks {
+                        let trackReaderOutput = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
+                        readerMetadata.add(trackReaderOutput)
+                        
+                        let metadataInput = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil)
+                        writer.add(metadataInput)
+                        
+                        metadataIOs.append((metadataInput, trackReaderOutput))
+                    }
+                    
+                    writer.metadata = [self.metadataForAssetID(assetIdentifier)]
+//                    let stillImageTimeMetadataAdapter = self.createMetadataAdaptorForStillImageTime()
+//                    writer.add(stillImageTimeMetadataAdapter.assetWriterInput)
+                    
+                    writer.startWriting()
+                    readerVideo.startReading()
+                    readerMetadata.startReading()
+                    writer.startSession(atSourceTime: .zero)
+                    
+//                    let _stillImagePercent: Float = 0.2
+//                    stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(items: [self.metadataForStillImageTime()],timeRange: self.asset.makeStillImageTimeRange(percent: _stillImagePercent, inFrameCount: self.asset.countFrames(exact: false))))
+                    
+                    for (videoInput, videoOutput) in videoIOs {
+                        writingGroup.enter()
+                        videoInput.requestMediaDataWhenReady(on: DispatchQueue(label: "assetWriterQueue.video")) {
+                            while videoInput.isReadyForMoreMediaData {
+                                if let sampleBuffer = videoOutput.copyNextSampleBuffer() {
+                                    videoInput.append(sampleBuffer)
+                                } else {
+                                    videoInput.markAsFinished()
+                                    writingGroup.leave()
+                                    break
+                                }
+                            }
+                        }
+                    }
+                    for (metadataInput, metadataOutput) in metadataIOs {
+                        writingGroup.enter()
+                        metadataInput.requestMediaDataWhenReady(on: DispatchQueue(label: "assetWriterQueue.metadata")) {
+                            while metadataInput.isReadyForMoreMediaData {
+                                if let sampleBuffer = metadataOutput.copyNextSampleBuffer() {
+                                    metadataInput.append(sampleBuffer)
+                                } else {
+                                    metadataInput.markAsFinished()
+                                    writingGroup.leave()
+                                    break
+                                }
+                            }
+                        }
+                    }
+                    
+                    writingGroup.notify(queue: .main) {
+                        if
+                            readerVideo.status == .completed &&
+                            readerMetadata.status == .completed &&
+                            writer.status == .writing {
+                            writer.finishWriting {
+                                completion(writer.status == .completed, writer.error)
+                            }
+                        } else {
+                            if let readerError = readerVideo.error {
+                                completion(false, readerError)
+                            } else if let readerError = readerMetadata.error {
+                                completion(false, readerError)
+                            } else if let writerError = writer.error {
+                                completion(false, writerError)
+                            } else {
+                                completion(false, NSError(domain: "VideoProcessing", code: -1, userInfo: [NSLocalizedDescriptionKey: "Unkown error"]))
+                            }
+                        }
+                    }
+                }
+            }
+        } catch {
+            completion(false, error)
+        }
+    }
+
+    /// Returns the QuickTime-format metadata items of the receiver's asset.
+    /// NOTE(review): presumably this carries the Live Photo pairing
+    /// identifier written elsewhere — confirm against the writer path.
+    private func metadata() -> [AVMetadataItem] {
+        return asset.metadata(forFormat: AVMetadataFormat.quickTimeMetadata)
+    }
+
+    /// Decodes the rotation encoded in the track's `preferredTransform`.
+    ///
+    /// - Parameter videoTrack: Track whose transform is inspected.
+    /// - Returns: 0, 90, 180 or 270 degrees; any transform that is not one
+    ///   of the four canonical orientations maps to 0.
+    private func degressFromVideoFileWithURL(videoTrack: AVAssetTrack)->Int {
+        let t = videoTrack.preferredTransform
+        switch (t.a, t.b, t.c, t.d) {
+        case (0, 1.0, -1.0, 0):
+            return 90       // portrait
+        case (0, -1.0, 1.0, 0):
+            return 270      // portrait upside-down
+        case (1.0, 0, 0, 1.0):
+            return 0        // landscape-right (no rotation)
+        case (-1.0, 0, 0, -1.0):
+            return 180      // landscape-left
+        default:
+            return 0        // unrecognised transform: treat as unrotated
+        }
+    }
+
+    /// Rewrites the video at `inputPath` into `outputPath` with an identity
+    /// `preferredTransform`, i.e. strips the rotation flag without
+    /// re-encoding (passthrough export to QuickTime .mov).
+    ///
+    /// - Parameters:
+    ///   - inputPath:  Path of the source movie.
+    ///   - outputPath: Path the cleaned movie is written to.
+    ///   - completion: Called exactly once with a success flag and error.
+    @objc public func cleanTransformVideo(at inputPath: String, outputPath: String, completion: @escaping (Bool, Error?) -> Void) {
+        let inputURL = URL(fileURLWithPath: inputPath)
+        let outputURL = URL(fileURLWithPath: outputPath)
+        
+        let asset = AVAsset(url: inputURL)
+        self.loadTracks(asset: asset, type: .video) { videoTracks in
+            guard let videoTrack = videoTracks.first else {
+                completion(false, NSError(domain: "Clean Transform", code: -1, userInfo: [NSLocalizedDescriptionKey: "Video track is not available"]))
+                return
+            }
+            
+            let videoComposition = AVMutableComposition()
+            guard let track = videoComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
+                // Previously returned silently — the caller would never hear back.
+                completion(false, NSError(domain: "Clean Transform", code: -2, userInfo: [NSLocalizedDescriptionKey: "Could not add composition track"]))
+                return
+            }
+            do {
+                try track.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration),
+                                      of: videoTrack,
+                                      at: .zero)
+                track.preferredTransform = .identity
+                
+                // Guard instead of force-unwrap: session creation can fail.
+                guard let exportSession = AVAssetExportSession(asset: videoComposition, presetName: AVAssetExportPresetPassthrough) else {
+                    completion(false, NSError(domain: "Clean Transform", code: -3, userInfo: [NSLocalizedDescriptionKey: "Could not create export session"]))
+                    return
+                }
+                exportSession.outputURL = outputURL
+                exportSession.outputFileType = .mov
+                
+                exportSession.exportAsynchronously {
+                    DispatchQueue.main.async {
+                        switch exportSession.status {
+                        case .completed:
+                            completion(true, nil)
+                        case .failed:
+                            completion(false, exportSession.error)
+                        default:
+                            // .cancelled and friends must still resolve the callback
+                            // (previously swallowed by `break`).
+                            completion(false, exportSession.error)
+                        }
+                    }
+                }
+            } catch {
+                // Was `print("\(error)")` — the failure never reached the caller.
+                completion(false, error)
+            }
+        }
+    }
+    
+    /// Retimes the entire clip at `inputPath` so it lasts exactly `duration`,
+    /// writing the result to `outputPath` as .mov.
+    ///
+    /// - Parameters:
+    ///   - inputPath:  Source video path.
+    ///   - duration:   Target duration the footage is scaled to.
+    ///   - outputPath: Destination path.
+    ///   - completion: Called exactly once with a success flag and error.
+    @objc public func accelerateVideo(at inputPath: String, to duration: CMTime, outputPath: String, completion: @escaping (Bool, Error?) -> Void) {
+        let videoURL = URL(fileURLWithPath: inputPath)
+        let asset = AVAsset(url: videoURL)
+
+        let composition = AVMutableComposition()
+        self.loadTracks(asset: asset, type: .video) { videoTracks in
+            do {
+                guard let videoTrack = videoTracks.first else {
+                    completion(false, NSError(domain: "Accelerate", code: -1, userInfo: [NSLocalizedDescriptionKey: "Video track is not available"]))
+                    return
+                }
+                
+                let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video,
+                                                                         preferredTrackID: kCMPersistentTrackID_Invalid)
+                
+                try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration),
+                                                            of: videoTrack,
+                                                            at: .zero)
+                let targetDuration = duration
+                
+                // Scaling the inserted range is what retimes the footage.
+                compositionVideoTrack?.scaleTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration),
+                                                       toDuration: targetDuration)
+                compositionVideoTrack?.preferredTransform = videoTrack.preferredTransform
+                
+                guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
+                    // Previously returned silently — the caller hung forever.
+                    completion(false, NSError(domain: "Accelerate", code: -2, userInfo: [NSLocalizedDescriptionKey: "Could not create export session"]))
+                    return
+                }
+                
+                let outputFileURL = URL(fileURLWithPath: outputPath)
+                exportSession.outputURL = outputFileURL
+                exportSession.outputFileType = .mov
+                exportSession.exportAsynchronously {
+                    switch exportSession.status {
+                    case .completed:
+                        completion(true, nil)
+                    case .failed:
+                        completion(false, exportSession.error)
+                    default:
+                        completion(false, NSError(domain: "VideoProcessing", code: -1, userInfo: [NSLocalizedDescriptionKey: "Unknown error"]))
+                    }
+                }
+
+            } catch {
+                completion(false, error)
+            }
+        }
+    }
+    
+    
+    /// Scales the video at `inputPath` to fit inside `outputSize`
+    /// (aspect-fit, centred on the render canvas) and writes it to
+    /// `outputPath`. Sources carrying a rotation flag are first rewritten
+    /// to identity orientation and then physically rotated back.
+    ///
+    /// - Parameters:
+    ///   - inputPath:  Source video path.
+    ///   - outputPath: Destination path (.mov).
+    ///   - outputSize: Render canvas the video is fitted into.
+    ///   - completion: Called exactly once with a success flag and error.
+    @objc public func resizeVideo(at inputPath: String, outputPath: String, outputSize: CGSize, completion: @escaping (Bool, Error?) -> Void) {
+        let inputURL = URL(fileURLWithPath: inputPath)
+        let outputURL = URL(fileURLWithPath: outputPath)
+        
+        let asset = AVAsset(url: inputURL)
+        self.loadTracks(asset: asset, type: .video) { videoTracks in
+            guard let videoTrack = videoTracks.first else {
+                completion(false, NSError(domain: "Resize", code: -1, userInfo: [NSLocalizedDescriptionKey: "Video track is not available"]))
+                return
+            }
+            
+            let originDegree = self.degressFromVideoFileWithURL(videoTrack: videoTrack)
+            if originDegree != 0 {
+                let tmpPath = NSSearchPathForDirectoriesInDomains(.cachesDirectory, .userDomainMask, true).first! + "/tmp.mp4"
+                try? FileManager.default.removeItem(atPath: tmpPath)
+                self.cleanTransformVideo(at: inputPath, outputPath: tmpPath) { success, error in
+                    // Previously the success flag was ignored and rotation ran
+                    // against a possibly missing tmp file.
+                    guard success else {
+                        completion(false, error)
+                        return
+                    }
+                    self.rotateVideo(at: tmpPath, outputPath: outputPath, degree: originDegree, completion: completion)
+                }
+                return
+            }
+            
+            // Guard instead of force-unwrap: session creation can fail.
+            guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
+                completion(false, NSError(domain: "Resize", code: -2, userInfo: [NSLocalizedDescriptionKey: "Could not create export session"]))
+                return
+            }
+            exportSession.outputURL = outputURL
+            exportSession.outputFileType = .mov
+            exportSession.shouldOptimizeForNetworkUse = true
+            
+            let videoComposition = AVMutableVideoComposition()
+            videoComposition.renderSize = outputSize
+            videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
+            
+            let instruction = AVMutableVideoCompositionInstruction()
+            instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
+            
+            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
+            
+            let preferredTransform = videoTrack.preferredTransform
+
+            // Aspect-fit maths: measure the display size after the track's
+            // transform, scale uniformly, then centre inside the canvas.
+            let originalSize = CGSize(width: videoTrack.naturalSize.width, height: videoTrack.naturalSize.height)
+            let transformedSize = originalSize.applying(preferredTransform)
+            let absoluteSize = CGSize(width: abs(transformedSize.width), height: abs(transformedSize.height))
+            
+            let widthRatio = outputSize.width / absoluteSize.width
+            let heightRatio = outputSize.height / absoluteSize.height
+            let scaleFactor = min(widthRatio, heightRatio)
+
+            let newWidth = absoluteSize.width * scaleFactor
+            let newHeight = absoluteSize.height * scaleFactor
+
+            let translateX = (outputSize.width - newWidth) / 2
+            let translateY = (outputSize.height - newHeight) / 2
+
+            let translateTransform = CGAffineTransform(translationX: translateX, y: translateY).scaledBy(x: scaleFactor, y: scaleFactor)
+
+            layerInstruction.setTransform(translateTransform, at: .zero)
+            
+            instruction.layerInstructions = [layerInstruction]
+            videoComposition.instructions = [instruction]
+            
+            exportSession.videoComposition = videoComposition
+            
+            exportSession.exportAsynchronously {
+                DispatchQueue.main.async {
+                    switch exportSession.status {
+                    case .completed:
+                        completion(true, nil)
+                    case .failed:
+                        completion(false, exportSession.error)
+                    default:
+                        // .cancelled etc. must still resolve the callback
+                        // (previously swallowed by `break`).
+                        completion(false, exportSession.error)
+                    }
+                }
+            }
+        }
+    }
+    
+    /// Physically rotates the video at `inputPath` by `degree`
+    /// (90, -90, or anything else treated as 180) and writes the result
+    /// to `outputPath`.
+    ///
+    /// - Parameters:
+    ///   - inputPath:  Source video path.
+    ///   - outputPath: Destination path (.mov).
+    ///   - degree:     Rotation to apply; ±90 swaps the render dimensions.
+    ///   - completion: Called exactly once with a success flag and error.
+    @objc public func rotateVideo(at inputPath: String, outputPath: String, degree: Int, completion: @escaping (Bool, Error?) -> Void) {
+        let inputURL = URL(fileURLWithPath: inputPath)
+        let outputURL = URL(fileURLWithPath: outputPath)
+        
+        let asset = AVAsset(url: inputURL)
+        self.loadTracks(asset: asset, type: .video) { videoTracks in
+            guard let videoTrack = videoTracks.first else {
+                // Error domain corrected from copy-pasted "Resize".
+                completion(false, NSError(domain: "Rotate", code: -1, userInfo: [NSLocalizedDescriptionKey: "Video track is not available"]))
+                return
+            }
+            
+            // Guard instead of force-unwrap: session creation can fail.
+            guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
+                completion(false, NSError(domain: "Rotate", code: -2, userInfo: [NSLocalizedDescriptionKey: "Could not create export session"]))
+                return
+            }
+            exportSession.outputURL = outputURL
+            exportSession.outputFileType = .mov
+            exportSession.shouldOptimizeForNetworkUse = true
+            
+            let videoComposition = AVMutableVideoComposition()
+            // A quarter-turn swaps width and height of the render canvas.
+            videoComposition.renderSize =  abs(degree) == 90 ? CGSizeMake(videoTrack.naturalSize.height, videoTrack.naturalSize.width) : videoTrack.naturalSize
+            videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
+            
+            let instruction = AVMutableVideoCompositionInstruction()
+            instruction.timeRange = CMTimeRange(start: .zero, duration: asset.duration)
+            
+            let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
+
+            // Translate first so the rotated frame lands back in the canvas.
+            if (degree == 90) {
+                layerInstruction.setTransform(CGAffineTransform(translationX: videoTrack.naturalSize.height, y: 0).rotated(by: .pi / 2), at: .zero)
+            } else if (degree == -90) {
+                layerInstruction.setTransform(CGAffineTransform(translationX: 0, y: videoTrack.naturalSize.width).rotated(by: -.pi / 2), at: .zero)
+            } else {
+                layerInstruction.setTransform(CGAffineTransform(translationX: videoTrack.naturalSize.width, y: videoTrack.naturalSize.height).rotated(by: .pi), at: .zero)
+            }
+            
+            instruction.layerInstructions = [layerInstruction]
+            videoComposition.instructions = [instruction]
+            
+            exportSession.videoComposition = videoComposition
+            
+            exportSession.exportAsynchronously {
+                DispatchQueue.main.async {
+                    switch exportSession.status {
+                    case .completed:
+                        completion(true, nil)
+                    case .failed:
+                        completion(false, exportSession.error)
+                    default:
+                        // .cancelled etc. must still resolve the callback
+                        // (previously swallowed by `break`).
+                        completion(false, exportSession.error)
+                    }
+                }
+            }
+        }
+    }
+    
+    /// Loads the tracks of `type` from `asset` and delivers them on the
+    /// main queue.
+    ///
+    /// On any failure the completion receives an empty array, matching the
+    /// iOS 15+ branch's `tracks ?? []` — previously the pre-iOS-15 failure
+    /// paths only printed and never called back, hanging every caller.
+    private func loadTracks(asset: AVAsset, type: AVMediaType, completion: @escaping ([AVAssetTrack]) -> Void) {
+        let tracksKey = #keyPath(AVAsset.tracks)
+        if #available(iOS 15.0, *) {
+            asset.loadTracks(withMediaType: type) { tracks, error in
+                if let error = error {
+                    print(error)
+                }
+                DispatchQueue.main.async {
+                    completion(tracks ?? [])
+                }
+            }
+        } else {
+            asset.loadValuesAsynchronously(forKeys: [tracksKey]) {
+                // Use the same key constant we asked to load (was a
+                // hard-coded "tracks" string).
+                let status = asset.statusOfValue(forKey: tracksKey, error: nil)
+                DispatchQueue.main.async {
+                    if status == .loaded {
+                        completion(asset.tracks(withMediaType: type))
+                    } else {
+                        print("load tracks failed with status \(status.rawValue)")
+                        completion([])
+                    }
+                }
+            }
+        }
+    }
+
+    /// Normalises the clip at `inputPath` to `targetDuration` seconds and
+    /// writes the result to `outputPath` (.mp4).
+    ///
+    /// Short clips are padded with frozen first/last frames split evenly
+    /// on both ends; long clips are trimmed to their centre window.
+    /// - Note: intermediate `first.mp4` / `last.mp4` files are written to
+    ///   the Documents directory and overwritten on every call.
+    /// - Parameter completion: Called exactly once with a success flag and
+    ///   error (previously several failure paths returned silently).
+    @objc public func durationVideo(at inputPath: String, outputPath: String, targetDuration: Double, completion: @escaping (Bool, Error?) -> Void) {
+        let asset = AVURLAsset(url: URL(fileURLWithPath: inputPath))
+        let duration = asset.duration
+        let timeScale = Int32(duration.timescale)
+        
+        let length = CMTimeGetSeconds(asset.duration)
+        if length <= targetDuration {
+            let composition = AVMutableComposition()
+            
+            guard let compositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
+                completion(false, NSError(domain: "Duration", code: -1, userInfo: [NSLocalizedDescriptionKey: "Could not add composition track"]))
+                return
+            }
+
+            guard let assetTrack = asset.tracks(withMediaType: .video).first else {
+                completion(false, NSError(domain: "Duration", code: -1, userInfo: [NSLocalizedDescriptionKey: "Video track is not available"]))
+                return
+            }
+            
+            compositionTrack.preferredTransform = assetTrack.preferredTransform
+
+            do {
+                try compositionTrack.insertTimeRange(CMTimeRange(start: .zero, duration: duration),
+                                                     of: assetTrack,
+                                                     at: .zero)
+            } catch {
+                // Was print + silent return.
+                completion(false, error)
+                return
+            }
+            
+            guard let firstFrame = getFrame(from: asset, at: CMTime(value: 0, timescale: timeScale)) else {
+                completion(false, NSError(domain: "Duration", code: -1, userInfo: [NSLocalizedDescriptionKey: "Failed to extract first frame"]))
+                return
+            }
+            guard let lastFrame = getFrame(from: asset, at: CMTimeSubtract(duration, CMTime(value: 1, timescale: timeScale))) else {
+                completion(false, NSError(domain: "Duration", code: -1, userInfo: [NSLocalizedDescriptionKey: "Failed to extract last frame"]))
+                return
+            }
+            
+            let firstPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! + "/first.mp4"
+            let lastPath = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! + "/last.mp4"
+            let firstURL = URL(fileURLWithPath: firstPath)
+            let lastURL = URL(fileURLWithPath: lastPath)
+            try? FileManager.default.removeItem(atPath: firstPath)
+            try? FileManager.default.removeItem(atPath: lastPath)
+            
+            // Split the padding evenly between a frozen prefix and suffix.
+            let prefixDuration = CMTime(seconds: (targetDuration - duration.seconds) / 2, preferredTimescale: timeScale)
+            let suffixDuration = CMTime(seconds: (targetDuration - duration.seconds) / 2, preferredTimescale: timeScale)
+            
+            self.createVideo(from: firstFrame, duration: CMTime(value: Int64(1 * timeScale), timescale: timeScale), outputURL: firstURL) { success in
+                self.appendToComposition(compositionTrack, asset: AVAsset(url: firstURL), duration: prefixDuration, at: .zero)
+                self.createVideo(from: lastFrame, duration: CMTime(value: Int64(1 * timeScale), timescale: timeScale), outputURL: lastURL) { success in
+                    self.appendToComposition(compositionTrack, asset: AVAsset(url: lastURL), duration: suffixDuration, at: CMTimeAdd(prefixDuration, duration))
+                    let exporter = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality)
+                    exporter?.outputURL = URL(fileURLWithPath: outputPath)
+                    exporter?.outputFileType = .mp4
+                    exporter?.exportAsynchronously {
+                        switch exporter?.status {
+                        case .completed:
+                            completion(true, nil)
+                        default:
+                            completion(false, exporter?.error)
+                        }
+                    }
+                }
+            }
+        } else {
+            // Clip is longer than the target: export the centre window.
+            let startTime = length / 2 - targetDuration / 2
+            let endTime = length / 2 + targetDuration / 2
+            
+            guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
+                completion(false, NSError(domain: "Duration", code: -2, userInfo: [NSLocalizedDescriptionKey: "Could not create export session"]))
+                return
+            }
+            exportSession.outputURL = URL(fileURLWithPath: outputPath)
+            exportSession.outputFileType = .mp4
+            exportSession.timeRange = CMTimeRangeFromTimeToTime(start: CMTimeMakeWithSeconds(startTime, preferredTimescale: timeScale), end: CMTimeMakeWithSeconds(endTime, preferredTimescale: timeScale))
+            exportSession.exportAsynchronously {
+                switch exportSession.status {
+                case .completed:
+                    completion(true, nil)
+                default:
+                    completion(false, exportSession.error)
+                }
+            }
+        }
+    }
+
+    /// Extracts a single frame of `asset` at `timestamp` (exact time, zero
+    /// tolerance) as a `UIImage`, or `nil` when extraction fails.
+    func getFrame(from asset: AVAsset, at timestamp: CMTime) -> UIImage? {
+        let generator = AVAssetImageGenerator(asset: asset)
+        generator.requestedTimeToleranceBefore = .zero
+        generator.requestedTimeToleranceAfter = .zero
+
+        var resolvedTime = CMTime.zero
+        guard let frame = try? generator.copyCGImage(at: timestamp, actualTime: &resolvedTime) else {
+            return nil
+        }
+        return UIImage(cgImage: frame)
+    }
+
+    /// Repeatedly inserts the first 1/30 s of `asset`'s video track into
+    /// `compositionTrack`, starting at `at`, until `duration` is filled —
+    /// effectively freeze-framing the asset's opening frame.
+    func appendToComposition(_ compositionTrack: AVMutableCompositionTrack, asset: AVAsset, duration: CMTime, at: CMTime) {
+        guard let sourceTrack = asset.tracks(withMediaType: .video).first else { return }
+
+        let step = CMTime(value: 1, timescale: 30)  // one frame at 30 fps
+        let stop = CMTimeAdd(at, duration)
+        var cursor = at
+
+        while cursor < stop {
+            do {
+                try compositionTrack.insertTimeRange(CMTimeRange(start: .zero, duration: step),
+                                                     of: sourceTrack,
+                                                     at: cursor)
+            } catch {
+                print("Error inserting time range: \(error)")
+                return
+            }
+            cursor = CMTimeAdd(cursor, step)
+        }
+    }
+
+    /// Renders a still `image` into an H.264 .mov of the given `duration`
+    /// at 30 fps, written to `outputURL`.
+    ///
+    /// - Parameters:
+    ///   - image:      Frame repeated for the whole clip.
+    ///   - duration:   Target clip length.
+    ///   - outputURL:  Destination file URL (must not already exist).
+    ///   - completion: Called exactly once with the write result.
+    @objc func createVideo(from image: UIImage, duration: CMTime, outputURL: URL, completion: @escaping (Bool) -> Void) {
+        // Previously a nil writer was silently ignored and the completion
+        // handler never fired; fail fast instead.
+        guard let writer = try? AVAssetWriter(outputURL: outputURL, fileType: .mov) else {
+            completion(false)
+            return
+        }
+
+        let writerInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
+            AVVideoCodecKey: AVVideoCodecType.h264,
+            AVVideoWidthKey: NSNumber(value: Float(image.size.width)),
+            AVVideoHeightKey: NSNumber(value: Float(image.size.height))
+        ])
+
+        let adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: nil)
+
+        writer.add(writerInput)
+
+        writer.startWriting()
+        writer.startSession(atSourceTime: .zero)
+
+        let buffer = pixelBuffer(from: image)
+
+        var frameCount = 0.0
+        let totalFrames = duration.seconds * 30     // 30 fps * duration in seconds
+        let frameDuration = CMTime(value: 1, timescale: 30)
+        var presentTime = CMTime.zero
+        var didFinish = false
+
+        writerInput.requestMediaDataWhenReady(on: DispatchQueue(label: "mediaInputQueue")) {
+            // Append only while the input is ready. Returning lets the
+            // callback fire again instead of busy-spinning on the queue
+            // (the old loop spun at 100% CPU whenever the input stalled).
+            while writerInput.isReadyForMoreMediaData && frameCount < totalFrames {
+                if let buffer = buffer {
+                    adaptor.append(buffer, withPresentationTime: presentTime)
+                }
+                presentTime = CMTimeAdd(presentTime, frameDuration)
+                frameCount += 1
+            }
+
+            // Finish exactly once, only after all frames were appended.
+            guard frameCount >= totalFrames, !didFinish else { return }
+            didFinish = true
+
+            writerInput.markAsFinished()
+            writer.finishWriting {
+                if writer.status == .completed {
+                    print("Video file created successfully.")
+                    completion(true)
+                } else {
+                    print("Failed to write video file: \(writer.error?.localizedDescription ?? "Unknown error")")
+                    completion(false)
+                }
+            }
+        }
+    }
+
+    /// Draws `image` into a newly allocated 32ARGB `CVPixelBuffer` of the
+    /// image's point size, or returns `nil` when allocation or context
+    /// creation fails (the old version force-unwrapped both).
+    func pixelBuffer(from image: UIImage) -> CVPixelBuffer? {
+        let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
+        var created: CVPixelBuffer?
+        let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(image.size.width), Int(image.size.height), kCVPixelFormatType_32ARGB, attrs, &created)
+        guard status == kCVReturnSuccess, let pixelBuffer = created else {
+            return nil
+        }
+
+        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
+        // Guarantee the unlock on every exit path, including context failure.
+        defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
+
+        let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
+        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
+        guard let context = CGContext(data: pixelData, width: Int(image.size.width), height: Int(image.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
+            return nil
+        }
+
+        // Flip to UIKit's top-left origin before drawing.
+        context.translateBy(x: 0, y: image.size.height)
+        context.scaleBy(x: 1.0, y: -1.0)
+
+        UIGraphicsPushContext(context)
+        image.draw(in: CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height))
+        UIGraphicsPopContext()
+
+        return pixelBuffer
+    }
+}

+ 158 - 0
TSLiveWallpaper/Common/ThirdParty/Util/LivePhotoConverter.swift

@@ -0,0 +1,158 @@
+//
+//  LivePhotoConverter.swift
+//  TSLiveWallpaper
+//
+//  Created by 100Years on 2024/12/31.
+//
+
+import Foundation
+import AVFoundation
+import UIKit
+import Photos
+
+/// Turns an ordinary video (plus an optional key photo) into the paired
+/// HEIC + MOV resources of an Apple Live Photo, and offers helpers to
+/// preview (`PHLivePhoto`) and save the pair to the photo library.
+class LivePhotoConverter {
+    /// Converts `videoURL` into Live Photo resources.
+    ///
+    /// Pipeline: normalise duration -> retime to 550/600 s (~0.92 s) ->
+    /// resize to 1080x1920 -> write a shared asset identifier into both
+    /// the key photo (HEIC) and the paired video (MOV).
+    ///
+    /// - Parameters:
+    ///   - videoURL: Source video file.
+    ///   - imageURL: Optional key photo; when nil a frame is extracted
+    ///     from the processed video instead.
+    ///   - completion: (success, photoURL, pairedVideoURL, errorMessage).
+    ///     NOTE(review): the final success path completes on the main
+    ///     queue, but early failures complete on the callback's queue —
+    ///     confirm callers tolerate both.
+    static func convertVideo(
+        _ videoURL: URL,
+        imageURL:URL? = nil,
+        completion: @escaping (Bool, URL?, URL?, String?) -> Void
+    ) {
+        print("Start converting...")
+        
+        // metadata.mov ships in the app bundle; presumably it donates the
+        // QuickTime metadata track required for Live Photo pairing —
+        // TODO confirm against Converter4Video.write.
+        guard let metaURL = Bundle.main.url(forResource: "metadata", withExtension: "mov") else {
+            completion(false, nil, nil, "Metadata file not found")
+            return
+        }
+        
+        
+        let fileManager = FileManager.default
+        // Working directory: <Caches>/livePhoto/editVideo
+        let eidtVideoPathURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!.appendingPathComponent("livePhoto").appendingPathComponent("editVideo")
+
+        
+        
+        // Intermediate files for each pipeline stage.
+        let documentPath = eidtVideoPathURL.path
+        let durationPath = "\(documentPath)/duration.mp4"
+        let acceleratePath = "\(documentPath)/accelerate.mp4"
+        let resizePath = "\(documentPath)/resize.mp4"
+        
+        let destinationDirectory = eidtVideoPathURL
+        // Create the destination directory if it does not exist yet.
+        if !fileManager.fileExists(atPath: destinationDirectory.path) {
+            do {
+                try fileManager.createDirectory(at: destinationDirectory, withIntermediateDirectories: true, attributes: nil)
+                debugPrint("创建文件夹成功")
+            } catch {
+                debugPrint("尝试创建文件夹失败: \(error.localizedDescription)")
+            }
+        }
+        
+        // Clear leftovers from a previous run so exports don't fail on
+        // pre-existing files.
+        try? FileManager.default.removeItem(atPath: durationPath)
+        try? FileManager.default.removeItem(atPath: acceleratePath)
+        try? FileManager.default.removeItem(atPath: resizePath)
+        
+        let asset = AVURLAsset(url: videoURL)
+        // targetDuration equals the source duration, so the duration stage
+        // is currently a pass-through — NOTE(review): confirm intentional.
+        let targetDuration = CMTimeGetSeconds(asset.duration)
+//        let videoSize = asset.tracks(withMediaType: .video).first?.naturalSize ?? CGSize(width: 1080, height: 1920)
+        // Fixed portrait canvas (the natural-size variant above is disabled).
+        let videoSize = CGSize(width: 1080, height: 1920)
+        // 550/600 ≈ 0.92 s target for the paired video.
+        let livePhotoDuration = CMTimeMake(value: 550, timescale: 600)
+        // Shared identifier that links the photo and video halves.
+        let assetIdentifier = UUID().uuidString
+        let finalPath = resizePath
+        let converter = Converter4Video(path: finalPath)
+        
+        // Stage 1: duration normalisation.
+        converter.durationVideo(at: videoURL.path, outputPath: durationPath, targetDuration: targetDuration) { success, error in
+            guard success else {
+                completion(false, nil, nil, error?.localizedDescription)
+                return
+            }
+            
+            // Stage 2: retime to the Live Photo clip length.
+            converter.accelerateVideo(at: durationPath, to: livePhotoDuration, outputPath: acceleratePath) { success, error in
+                guard success else {
+                    completion(false, nil, nil, error?.localizedDescription)
+                    return
+                }
+                
+                // Stage 3: fit into the 1080x1920 canvas.
+                converter.resizeVideo(at: acceleratePath, outputPath: resizePath, outputSize: videoSize) { success, error in
+                    guard success else {
+                        completion(false, nil, nil, error?.localizedDescription)
+                        return
+                    }
+
+                    // Final paired resources.
+                    let picturePath = "\(documentPath)/live.heic"
+                    let videoPath = "\(documentPath)/live.mov"
+                    
+                    try? FileManager.default.removeItem(atPath: picturePath)
+                    try? FileManager.default.removeItem(atPath: videoPath)
+                    
+                    let asset = AVURLAsset(url:URL(fileURLWithPath: finalPath))
+                    // Key photo: caller-supplied image, or a frame pulled
+                    // from the processed video.
+                    var image:UIImage? = nil
+                    if let imageURL = imageURL {
+                        image = UIImage(contentsOfFile: imageURL.path)
+                    }else{
+                        if let imageData = self.generateKeyPhoto(from: asset) {
+                            image = UIImage(data: imageData)
+                        }
+                    }
+                    
+                    guard let image = image else {
+                        completion(false, nil, nil, "image nil")
+                        return
+                    }
+                    
+                    // Stage 4: stamp the shared identifier into both halves.
+                    // NOTE(review): the image write's result is not checked.
+                    let imageConverter = Converter4Image(image:image)
+                    imageConverter.write(dest: picturePath, assetIdentifier: assetIdentifier)
+                    converter.write(dest: videoPath, assetIdentifier: assetIdentifier, metaURL: metaURL) { success, error in
+                        guard success else {
+                            completion(false, nil, nil, error?.localizedDescription)
+                            return
+                        }
+                        
+                        let photoURL = URL(fileURLWithPath: picturePath)
+                        let pairedVideoURL = URL(fileURLWithPath: videoPath)
+                        DispatchQueue.main.async {
+                            completion(success, photoURL, pairedVideoURL, error?.localizedDescription)
+                        }
+                    }
+                }
+            }
+        }
+    }
+    
+
+    /// Extracts the key-photo JPEG from `videoAsset`, preferring the
+    /// asset's embedded still-image time and falling back to the midpoint.
+    private static func generateKeyPhoto(from videoAsset: AVAsset) -> Data? {
+        var percent:Float = 0.5
+        if let stillImageTime = videoAsset.stillImageTime() {
+            percent = Float(stillImageTime.value) / Float(videoAsset.duration.value)
+        }
+        guard let imageFrame = videoAsset.getAssetFrame(percent: percent) else { return nil }
+        guard let jpegData = imageFrame.jpegData(compressionQuality: 1.0) else { return nil }
+        return jpegData
+    }
+    
+    /// Builds an in-memory `PHLivePhoto` preview from a paired video and
+    /// image; degraded (placeholder) callbacks are ignored, and the final
+    /// result is delivered on the main queue.
+    static func livePhotoRequest(videoURL:URL,imageURL:URL,completion: @escaping (PHLivePhoto?) -> Void){
+        _ = PHLivePhoto.request(withResourceFileURLs: [videoURL, imageURL], placeholderImage: nil, targetSize: CGSize.zero, contentMode: PHImageContentMode.aspectFit, resultHandler: { (livePhoto: PHLivePhoto?, info: [AnyHashable : Any]) -> Void in
+            if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
+                return
+            }
+            DispatchQueue.main.async {
+                completion(livePhoto)
+            }
+        })
+    }
+    
+
+    /// Saves the paired video + photo to the photo library as one Live
+    /// Photo asset; `completion(success)` is called on the main queue.
+    static func saveToLibrary(videoURL:URL,imageURL:URL,completion: @escaping (Bool) -> Void){
+        PHPhotoLibrary.shared().performChanges({
+            let creationRequest = PHAssetCreationRequest.forAsset()
+            let options = PHAssetResourceCreationOptions()
+            creationRequest.addResource(with: PHAssetResourceType.pairedVideo, fileURL: videoURL, options: options)
+            creationRequest.addResource(with: PHAssetResourceType.photo, fileURL: imageURL, options: options)
+        }, completionHandler: { (success, error) in
+            if error != nil {
+                print(error as Any)
+            }
+            DispatchQueue.main.async {
+                completion(success)
+            }
+        })
+    }
+}
+

+ 7 - 0
TSLiveWallpaper/Common/ThirdParty/Util/LivePhotoUtil.h

@@ -0,0 +1,7 @@
+#import <Foundation/Foundation.h>
+
+/// Objective-C entry point for converting a plain video file into a
+/// Live Photo (implementation in LivePhotoUtil.m).
+@interface LivePhotoUtil : NSObject
+
+/// Converts the video at `path`; `complete` receives a success flag and a
+/// message string. NOTE(review): the queue `complete` is invoked on is not
+/// visible from this header — check the implementation before doing UI
+/// work in the callback.
++ (void)convertVideo:(NSString*)path complete:(void(^)(BOOL, NSString*))complete;
+
+@end

+ 168 - 0
TSLiveWallpaper/Common/ThirdParty/Util/LivePhotoUtil.m

@@ -0,0 +1,168 @@
+#import <TSLiveWallpaper-Swift.h>
+#import "LivePhotoUtil.h"
+#import <Photos/Photos.h>
+#import <UIKit/UIKit.h>
+#import <ImageIO/ImageIO.h>
+#import <MobileCoreServices/MobileCoreServices.h>
+
+@implementation LivePhotoUtil
+
+
+
++ (void)convertVideo:(NSString*)path complete:(void(^)(BOOL, NSString*))complete{
+    NSLog(@"start converting");
+    
+    NSURL *metaURL = [NSBundle.mainBundle URLForResource:@"metadata" withExtension:@"mov"];
+    CGSize livePhotoSize = CGSizeMake(1080, 1920);
+    CMTime livePhotoDuration = CMTimeMake(550, 600);
+    NSString *assetIdentifier = NSUUID.UUID.UUIDString;
+    
+    NSString *documentPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
+    NSString *durationPath = [documentPath stringByAppendingString:@"/duration.mp4"];
+    NSString *acceleratePath = [documentPath stringByAppendingString:@"/accelerate.mp4"];
+    NSString *resizePath = [documentPath stringByAppendingString:@"/resize.mp4"];
+    [NSFileManager.defaultManager removeItemAtPath:durationPath error:nil];
+    [NSFileManager.defaultManager removeItemAtPath:acceleratePath error:nil];
+    [NSFileManager.defaultManager removeItemAtPath:resizePath error:nil];
+    NSString *finalPath = resizePath;
+    
+    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:path] options:nil];
+    CGFloat targetDuration = 3;
+//    targetDuration = CMTimeGetSeconds(asset.duration);
+//    livePhotoSize = asset.tracks.firstObject.naturalSize;
+    
+
+    Converter4Video *converter = [[Converter4Video alloc] initWithPath:finalPath];
+    [converter durationVideoAt:path outputPath:durationPath targetDuration:targetDuration completion:^(BOOL success, NSError * error) {
+        [converter accelerateVideoAt:durationPath to:livePhotoDuration outputPath:acceleratePath completion:^(BOOL success, NSError * error) {
+            [converter resizeVideoAt:acceleratePath outputPath:resizePath outputSize:livePhotoSize completion:^(BOOL success, NSError * error) {
+                
+                AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:finalPath] options:nil];
+                AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
+                generator.appliesPreferredTrackTransform = YES;
+                generator.requestedTimeToleranceAfter = kCMTimeZero;
+                generator.requestedTimeToleranceBefore = kCMTimeZero;
+                    NSMutableArray *times = @[].mutableCopy;
+            //        for (int i=0; i<10; i++) {
+            //            if (i!=5) {
+            //                continue;
+            //            }
+                        CMTime time = CMTimeMakeWithSeconds(0.5, asset.duration.timescale);
+                        [times addObject:[NSValue valueWithCMTime:time]];
+            //        }
+                    dispatch_queue_t q = dispatch_queue_create("image", DISPATCH_QUEUE_SERIAL);
+                    __block int index = 0;
+                [generator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef  _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
+                    if (image){
+                        NSString *picturePath = [documentPath stringByAppendingFormat:@"/%@%d.heic", @"live", index, nil];
+                        NSString *videoPath = [documentPath stringByAppendingFormat:@"/%@%d.mov", @"live", index, nil];
+                        index += 1;
+                        [NSFileManager.defaultManager removeItemAtPath:picturePath error:nil];
+                        [NSFileManager.defaultManager removeItemAtPath:videoPath error:nil];
+                        
+                        Converter4Image *converter4Image = [[Converter4Image alloc] initWithImage:[UIImage imageWithCGImage:image]];
+                        dispatch_async(q, ^{
+                            [converter4Image writeWithDest:picturePath assetIdentifier:assetIdentifier];
+                            [converter writeWithDest:videoPath assetIdentifier:assetIdentifier metaURL:metaURL completion:^(BOOL success, NSError * error) {
+                                if (!success) {
+                                    NSLog(@"merge failed: %@", error);
+                                    complete(NO, error.localizedDescription);
+                                    return;
+                                }
+                                [PHPhotoLibrary.sharedPhotoLibrary performChanges:^{
+                                    PHAssetCreationRequest *request = [PHAssetCreationRequest creationRequestForAsset];
+                                    NSURL *photoURL = [NSURL fileURLWithPath:picturePath];
+                                    NSURL *pairedVideoURL = [NSURL fileURLWithPath:videoPath];
+                                    [request addResourceWithType:PHAssetResourceTypePhoto fileURL:photoURL options:[PHAssetResourceCreationOptions new]];
+                                    [request addResourceWithType:PHAssetResourceTypePairedVideo fileURL:pairedVideoURL options:[PHAssetResourceCreationOptions new]];
+                                } completionHandler:^(BOOL success, NSError * _Nullable error) {
+                                    dispatch_async(dispatch_get_main_queue(), ^{
+                                        complete(error==nil, error.localizedDescription);
+                                    });
+                                }];
+                            }];
+                        });
+                    }
+                }];
+            }];
+        }];
+    }];
+}
+
+
+//+ (void)convertVideo:(NSString*)path complete:(void(^)(BOOL, NSString*))complete{
+//    NSLog(@"start converting");
+//    
+//    NSURL *metaURL = [NSBundle.mainBundle URLForResource:@"metadata" withExtension:@"mov"];
+//    CGSize livePhotoSize = CGSizeMake(1080, 1920);
+//    CMTime livePhotoDuration = CMTimeMake(550, 600);
+//    NSString *assetIdentifier = NSUUID.UUID.UUIDString;
+//    
+//    NSString *documentPath = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
+//    NSString *durationPath = [documentPath stringByAppendingString:@"/duration.mp4"];
+//    NSString *acceleratePath = [documentPath stringByAppendingString:@"/accelerate.mp4"];
+//    NSString *resizePath = [documentPath stringByAppendingString:@"/resize.mp4"];
+//    [NSFileManager.defaultManager removeItemAtPath:durationPath error:nil];
+//    [NSFileManager.defaultManager removeItemAtPath:acceleratePath error:nil];
+//    [NSFileManager.defaultManager removeItemAtPath:resizePath error:nil];
+//    NSString *finalPath = resizePath;
+//
+//    Converter4Video *converter = [[Converter4Video alloc] initWithPath:finalPath];
+//    
+//    [converter durationVideoAt:path outputPath:durationPath targetDuration:3 completion:^(BOOL success, NSError * error) {
+//        [converter accelerateVideoAt:durationPath to:livePhotoDuration outputPath:acceleratePath completion:^(BOOL success, NSError * error) {
+//            [converter resizeVideoAt:acceleratePath outputPath:resizePath outputSize:livePhotoSize completion:^(BOOL success, NSError * error) {
+//                
+//                AVURLAsset *asset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:finalPath] options:nil];
+//                AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
+//                generator.appliesPreferredTrackTransform = YES;
+//                generator.requestedTimeToleranceAfter = kCMTimeZero;
+//                generator.requestedTimeToleranceBefore = kCMTimeZero;
+//                    NSMutableArray *times = @[].mutableCopy;
+//            //        for (int i=0; i<10; i++) {
+//            //            if (i!=5) {
+//            //                continue;
+//            //            }
+//                        CMTime time = CMTimeMakeWithSeconds(0.5, asset.duration.timescale);
+//                        [times addObject:[NSValue valueWithCMTime:time]];
+//            //        }
+//                    dispatch_queue_t q = dispatch_queue_create("image", DISPATCH_QUEUE_SERIAL);
+//                    __block int index = 0;
+//                [generator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef  _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
+//                    if (image){
+//                        NSString *picturePath = [documentPath stringByAppendingFormat:@"/%@%d.heic", @"live", index, nil];
+//                        NSString *videoPath = [documentPath stringByAppendingFormat:@"/%@%d.mov", @"live", index, nil];
+//                        index += 1;
+//                        [NSFileManager.defaultManager removeItemAtPath:picturePath error:nil];
+//                        [NSFileManager.defaultManager removeItemAtPath:videoPath error:nil];
+//                        
+//                        Converter4Image *converter4Image = [[Converter4Image alloc] initWithImage:[UIImage imageWithCGImage:image]];
+//                        dispatch_async(q, ^{
+//                            [converter4Image writeWithDest:picturePath assetIdentifier:assetIdentifier];
+//                            [converter writeWithDest:videoPath assetIdentifier:assetIdentifier metaURL:metaURL completion:^(BOOL success, NSError * error) {
+//                                if (!success) {
+//                                    NSLog(@"merge failed: %@", error);
+//                                    complete(NO, error.localizedDescription);
+//                                    return;
+//                                }
+//                                [PHPhotoLibrary.sharedPhotoLibrary performChanges:^{
+//                                    PHAssetCreationRequest *request = [PHAssetCreationRequest creationRequestForAsset];
+//                                    NSURL *photoURL = [NSURL fileURLWithPath:picturePath];
+//                                    NSURL *pairedVideoURL = [NSURL fileURLWithPath:videoPath];
+//                                    [request addResourceWithType:PHAssetResourceTypePhoto fileURL:photoURL options:[PHAssetResourceCreationOptions new]];
+//                                    [request addResourceWithType:PHAssetResourceTypePairedVideo fileURL:pairedVideoURL options:[PHAssetResourceCreationOptions new]];
+//                                } completionHandler:^(BOOL success, NSError * _Nullable error) {
+//                                    dispatch_async(dispatch_get_main_queue(), ^{
+//                                        complete(error==nil, error.localizedDescription);
+//                                    });
+//                                }];
+//                            }];
+//                        });
+//                    }
+//                }];
+//            }];
+//        }];
+//    }];
+//}
+
+@end

BIN
TSLiveWallpaper/Common/ThirdParty/Util/metadata.mov


BIN
TSLiveWallpaper/Common/ThirdParty/Util/origin.mp4


+ 44 - 15
TSLiveWallpaper/Common/Tool/TSFileManagerTool.swift

@@ -7,12 +7,45 @@
 
 class TSFileManagerTool {
     
+    /// 获取 Video 下载后保存的的文件件路径
+    static var saveDownVideoPathURL:URL = {
+        let saveVideoPathURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!.appendingPathComponent("livePhoto").appendingPathComponent("saveVideo")
+        return saveVideoPathURL
+    }()
     
+    /// 获取 Video 临时编辑的文件件路径
+    static var editLiveVideoPathURL:URL = {
+        let editVideoPathURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!.appendingPathComponent("livePhoto").appendingPathComponent("editVideo")
+        return editVideoPathURL
+    }()
     
+    /// 获取 Video 编辑后保存的的文件件路径
+    static var saveLiveVideoPathURL:URL = {
+        let saveVideoPathURL = FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!.appendingPathComponent("livePhoto").appendingPathComponent("saveVideo")
+        return saveVideoPathURL
+    }()
+    
+    
+    /// 获取沙盒 Documents 目录路径
+    static var documentsDirectory: URL {
+        return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
+    }
+
+    /// 获取沙盒 Cache 目录路径
+    static var cacheDirectory: URL {
+        return FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!
+    }
+
+    /// 获取沙盒 Temporary 目录路径
+    static var temporaryDirectory: URL {
+        return FileManager.default.temporaryDirectory
+    }
+
     static func copyFileWithOverwrite(from sourceURL: URL, to targetURL: URL) {
         let fileManager = FileManager.default
         do {
             removeItem(from: targetURL)
+            checkFolderAndCreate(from: targetURL)
             try fileManager.copyItem(at: sourceURL, to: targetURL)
             debugPrint("文件复制成功!")
         } catch {
@@ -77,21 +110,17 @@ class TSFileManagerTool {
         }
     }
     
-    
-    
-    /// 获取沙盒 Documents 目录路径
-    static var documentsDirectory: URL {
-        return FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
-    }
-
-    /// 获取沙盒 Cache 目录路径
-    static var cacheDirectory: URL {
-        return FileManager.default.urls(for: .cachesDirectory, in: .userDomainMask).first!
-    }
-
-    /// 获取沙盒 Temporary 目录路径
-    static var temporaryDirectory: URL {
-        return FileManager.default.temporaryDirectory
+    static func checkFolderAndCreate(from destinationURL: URL){
+        let fileManager = FileManager.default
+        let destinationDirectory = destinationURL.deletingLastPathComponent()
+        // 如果目标文件夹不存在,创建文件夹
+        if !fileManager.fileExists(atPath: destinationDirectory.path) {
+            do {
+                try fileManager.createDirectory(at: destinationDirectory, withIntermediateDirectories: true, attributes: nil)
+            } catch {
+                debugPrint("尝试创建文件夹失败: \(error.localizedDescription)")
+            }
+        }
     }
 
     // MARK: - 文件操作方法

+ 3 - 7
TSLiveWallpaper/DataManger/TSImageDataCenter.swift

@@ -122,17 +122,13 @@ class TSImageDataCenter{
             if let totalArray = Mapper<TSImageDataSectionModel>().mapArray(JSONString: editLiveHistoryListString){
                 return totalArray
             }else{
-//                let sectionModel = TSImageDataSectionModel()
-//                let itemModel = TSImageDataItemModel()
-//                sectionModel.items = [itemModel]
-//                sectionModel.type = "Historical".localized
-                return []
+                let sectionModel = TSImageDataSectionModel()
+                sectionModel.type = "Historical".localized
+                return [sectionModel]
             }
         }
         
         set{
-            
-            
             if let jsonString = newValue.toJSONString() {
                 editLiveHistoryListString = jsonString
             }