// RCTCameraRollManager.mm
  1. /*
  2. * Copyright (c) Facebook, Inc. and its affiliates.
  3. *
  4. * This source code is licensed under the MIT license found in the
  5. * LICENSE file in the root directory of this source tree.
  6. */
  7. #import "RCTCameraRollManager.h"
  8. #import <FBReactNativeSpec/FBReactNativeSpec.h>
  9. #import <Foundation/Foundation.h>
  10. #import <UIKit/UIKit.h>
  11. #import <Photos/Photos.h>
  12. #import <dlfcn.h>
  13. #import <objc/runtime.h>
  14. #import <MobileCoreServices/UTType.h>
  15. #import <React/RCTBridge.h>
  16. #import <React/RCTConvert.h>
  17. #import <React/RCTImageLoader.h>
  18. #import <React/RCTLog.h>
  19. #import <React/RCTUtils.h>
  20. #import "RCTCameraRollPlugins.h"
  21. #import "RCTAssetsLibraryRequestHandler.h"
// Maps the JS-facing `groupTypes` strings onto PHAssetCollectionSubtype values.
// Several mappings are deliberate approximations kept for backwards
// compatibility with the old ALAssetsLibrary-based implementation.
@implementation RCTConvert (PHAssetCollectionSubtype)

RCT_ENUM_CONVERTER(PHAssetCollectionSubtype, (@{
  @"album": @(PHAssetCollectionSubtypeAny),
  @"all": @(PHAssetCollectionSubtypeSmartAlbumUserLibrary),
  @"event": @(PHAssetCollectionSubtypeAlbumSyncedEvent),
  @"faces": @(PHAssetCollectionSubtypeAlbumSyncedFaces),
  @"library": @(PHAssetCollectionSubtypeSmartAlbumUserLibrary),
  @"photo-stream": @(PHAssetCollectionSubtypeAlbumMyPhotoStream), // incorrect, but legacy
  @"photostream": @(PHAssetCollectionSubtypeAlbumMyPhotoStream),
  @"saved-photos": @(PHAssetCollectionSubtypeAny), // incorrect, but legacy
  @"savedphotos": @(PHAssetCollectionSubtypeAny), // This was ALAssetsGroupSavedPhotos, seems to have no direct correspondence in PHAssetCollectionSubtype
}), PHAssetCollectionSubtypeAny, integerValue)

@end
  35. @implementation RCTConvert (PHFetchOptions)
  36. + (PHFetchOptions *)PHFetchOptionsFromMediaType:(NSString *)mediaType
  37. {
  38. // This is not exhaustive in terms of supported media type predicates; more can be added in the future
  39. NSString *const lowercase = [mediaType lowercaseString];
  40. if ([lowercase isEqualToString:@"photos"]) {
  41. PHFetchOptions *const options = [PHFetchOptions new];
  42. options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeImage];
  43. return options;
  44. } else if ([lowercase isEqualToString:@"videos"]) {
  45. PHFetchOptions *const options = [PHFetchOptions new];
  46. options.predicate = [NSPredicate predicateWithFormat:@"mediaType = %d", PHAssetMediaTypeVideo];
  47. return options;
  48. } else {
  49. if (![lowercase isEqualToString:@"all"]) {
  50. RCTLogError(@"Invalid filter option: '%@'. Expected one of 'photos',"
  51. "'videos' or 'all'.", mediaType);
  52. }
  53. // This case includes the "all" mediatype
  54. return nil;
  55. }
  56. }
  57. @end
// Class extension: adopts the codegen'd TurboModule spec protocol privately.
@interface RCTCameraRollManager() <NativeCameraRollManagerSpec>
@end
@implementation RCTCameraRollManager

RCT_EXPORT_MODULE()

@synthesize bridge = _bridge;

// Error codes surfaced to JS through promise rejection.
static NSString *const kErrorUnableToSave = @"E_UNABLE_TO_SAVE";
static NSString *const kErrorUnableToLoad = @"E_UNABLE_TO_LOAD";

static NSString *const kErrorAuthRestricted = @"E_PHOTO_LIBRARY_AUTH_RESTRICTED";
static NSString *const kErrorAuthDenied = @"E_PHOTO_LIBRARY_AUTH_DENIED";

// Invoked once photo-library access has been granted.
typedef void (^PhotosAuthorizedBlock)(void);
  68. static void requestPhotoLibraryAccess(RCTPromiseRejectBlock reject, PhotosAuthorizedBlock authorizedBlock) {
  69. PHAuthorizationStatus authStatus = [PHPhotoLibrary authorizationStatus];
  70. if (authStatus == PHAuthorizationStatusRestricted) {
  71. reject(kErrorAuthRestricted, @"Access to photo library is restricted", nil);
  72. } else if (authStatus == PHAuthorizationStatusAuthorized) {
  73. authorizedBlock();
  74. } else if (authStatus == PHAuthorizationStatusNotDetermined) {
  75. [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
  76. requestPhotoLibraryAccess(reject, authorizedBlock);
  77. }];
  78. } else {
  79. reject(kErrorAuthDenied, @"Access to photo library was denied", nil);
  80. }
  81. }
// Saves an image or video (per `type`) referenced by `request` into the photo
// library, resolving with a "ph://<localIdentifier>" URI for the new asset.
RCT_EXPORT_METHOD(saveToCameraRoll:(NSURLRequest *)request
                  type:(NSString *)type
                  resolve:(RCTPromiseResolveBlock)resolve
                  reject:(RCTPromiseRejectBlock)reject)
{
  __block PHObjectPlaceholder *placeholder;

  // We load images and videos differently.
  // Images have many custom loaders which can load images from ALAssetsLibrary URLs, PHPhotoLibrary
  // URLs, `data:` URIs, etc. Video URLs are passed directly through for now; it may be nice to support
  // more ways of loading videos in the future.
  __block NSURL *inputURI = nil;
  __block UIImage *inputImage = nil;

  void (^saveBlock)(void) = ^void() {
    // performChanges and the completionHandler are called on
    // arbitrary threads, not the main thread - this is safe
    // for now since all JS is queued and executed on a single thread.
    // We should reevaluate this if that assumption changes.
    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
      PHAssetChangeRequest *changeRequest;

      // Defaults to "photo". `type` is an optional param.
      if ([type isEqualToString:@"video"]) {
        changeRequest = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:inputURI];
      } else {
        changeRequest = [PHAssetChangeRequest creationRequestForAssetFromImage:inputImage];
      }

      // The placeholder carries the local identifier of the not-yet-committed
      // asset, read back in the completion handler below.
      placeholder = [changeRequest placeholderForCreatedAsset];
    } completionHandler:^(BOOL success, NSError * _Nullable error) {
      if (success) {
        NSString *uri = [NSString stringWithFormat:@"ph://%@", [placeholder localIdentifier]];
        resolve(uri);
      } else {
        reject(kErrorUnableToSave, nil, error);
      }
    }];
  };

  void (^loadBlock)(void) = ^void() {
    if ([type isEqualToString:@"video"]) {
      // Videos are saved straight from their file URL; no loading step.
      inputURI = request.URL;
      saveBlock();
    } else {
      // Images go through RCTImageLoader so data:/ph:// etc. URIs all work.
      [[self.bridge moduleForClass:[RCTImageLoader class]] loadImageWithURLRequest:request callback:^(NSError *error, UIImage *image) {
        if (error) {
          reject(kErrorUnableToLoad, nil, error);
          return;
        }

        inputImage = image;
        saveBlock();
      }];
    }
  };

  // Load (and then save) only after photo-library access has been granted.
  requestPhotoLibraryAccess(reject, loadBlock);
}
  134. static void RCTResolvePromise(RCTPromiseResolveBlock resolve,
  135. NSArray<NSDictionary<NSString *, id> *> *assets,
  136. BOOL hasNextPage)
  137. {
  138. if (!assets.count) {
  139. resolve(@{
  140. @"edges": assets,
  141. @"page_info": @{
  142. @"has_next_page": @NO,
  143. }
  144. });
  145. return;
  146. }
  147. resolve(@{
  148. @"edges": assets,
  149. @"page_info": @{
  150. @"start_cursor": assets[0][@"node"][@"image"][@"uri"],
  151. @"end_cursor": assets[assets.count - 1][@"node"][@"image"][@"uri"],
  152. @"has_next_page": @(hasNextPage),
  153. }
  154. });
  155. }
  156. RCT_EXPORT_METHOD(getPhotos:(JS::NativeCameraRollManager::GetPhotosParams &)params
  157. resolve:(RCTPromiseResolveBlock)resolve
  158. reject:(RCTPromiseRejectBlock)reject)
  159. {
  160. checkPhotoLibraryConfig();
  161. NSUInteger const first = [RCTConvert NSInteger:[NSNumber numberWithDouble:params.first()]];
  162. NSString *const afterCursor = [RCTConvert NSString:params.after()];
  163. NSString *const groupName = [RCTConvert NSString:params.groupName()];
  164. NSString *const groupTypes = [[RCTConvert NSString:params.groupTypes()] lowercaseString];
  165. NSString *const mediaType = [RCTConvert NSString:params.assetType()];
  166. NSArray<NSString *> *const mimeTypes = [RCTConvert NSStringArray:RCTConvertOptionalVecToArray(params.mimeTypes())];
  167. // If groupTypes is "all", we want to fetch the SmartAlbum "all photos". Otherwise, all
  168. // other groupTypes values require the "album" collection type.
  169. PHAssetCollectionType const collectionType = ([groupTypes isEqualToString:@"all"]
  170. ? PHAssetCollectionTypeSmartAlbum
  171. : PHAssetCollectionTypeAlbum);
  172. PHAssetCollectionSubtype const collectionSubtype = [RCTConvert PHAssetCollectionSubtype:groupTypes];
  173. // Predicate for fetching assets within a collection
  174. PHFetchOptions *const assetFetchOptions = [RCTConvert PHFetchOptionsFromMediaType:mediaType];
  175. assetFetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" ascending:NO]];
  176. BOOL __block foundAfter = NO;
  177. BOOL __block hasNextPage = NO;
  178. BOOL __block resolvedPromise = NO;
  179. NSMutableArray<NSDictionary<NSString *, id> *> *assets = [NSMutableArray new];
  180. // Filter collection name ("group")
  181. PHFetchOptions *const collectionFetchOptions = [PHFetchOptions new];
  182. collectionFetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"endDate" ascending:NO]];
  183. if (groupName != nil) {
  184. collectionFetchOptions.predicate = [NSPredicate predicateWithFormat:[NSString stringWithFormat:@"localizedTitle == '%@'", groupName]];
  185. }
  186. requestPhotoLibraryAccess(reject, ^{
  187. PHFetchResult<PHAssetCollection *> *const assetCollectionFetchResult = [PHAssetCollection fetchAssetCollectionsWithType:collectionType subtype:collectionSubtype options:collectionFetchOptions];
  188. [assetCollectionFetchResult enumerateObjectsUsingBlock:^(PHAssetCollection * _Nonnull assetCollection, NSUInteger collectionIdx, BOOL * _Nonnull stopCollections) {
  189. // Enumerate assets within the collection
  190. PHFetchResult<PHAsset *> *const assetsFetchResult = [PHAsset fetchAssetsInAssetCollection:assetCollection options:assetFetchOptions];
  191. [assetsFetchResult enumerateObjectsUsingBlock:^(PHAsset * _Nonnull asset, NSUInteger assetIdx, BOOL * _Nonnull stopAssets) {
  192. NSString *const uri = [NSString stringWithFormat:@"ph://%@", [asset localIdentifier]];
  193. if (afterCursor && !foundAfter) {
  194. if ([afterCursor isEqualToString:uri]) {
  195. foundAfter = YES;
  196. }
  197. return; // skip until we get to the first one
  198. }
  199. // Get underlying resources of an asset - this includes files as well as details about edited PHAssets
  200. if ([mimeTypes count] > 0) {
  201. NSArray<PHAssetResource *> *const assetResources = [PHAssetResource assetResourcesForAsset:asset];
  202. if (![assetResources firstObject]) {
  203. return;
  204. }
  205. PHAssetResource *const _Nonnull resource = [assetResources firstObject];
  206. CFStringRef const uti = (__bridge CFStringRef _Nonnull)(resource.uniformTypeIdentifier);
  207. NSString *const mimeType = (NSString *)CFBridgingRelease(UTTypeCopyPreferredTagWithClass(uti, kUTTagClassMIMEType));
  208. BOOL __block mimeTypeFound = NO;
  209. [mimeTypes enumerateObjectsUsingBlock:^(NSString * _Nonnull mimeTypeFilter, NSUInteger idx, BOOL * _Nonnull stop) {
  210. if ([mimeType isEqualToString:mimeTypeFilter]) {
  211. mimeTypeFound = YES;
  212. *stop = YES;
  213. }
  214. }];
  215. if (!mimeTypeFound) {
  216. return;
  217. }
  218. }
  219. // If we've accumulated enough results to resolve a single promise
  220. if (first == assets.count) {
  221. *stopAssets = YES;
  222. *stopCollections = YES;
  223. hasNextPage = YES;
  224. RCTAssert(resolvedPromise == NO, @"Resolved the promise before we finished processing the results.");
  225. RCTResolvePromise(resolve, assets, hasNextPage);
  226. resolvedPromise = YES;
  227. return;
  228. }
  229. NSString *const assetMediaTypeLabel = (asset.mediaType == PHAssetMediaTypeVideo
  230. ? @"video"
  231. : (asset.mediaType == PHAssetMediaTypeImage
  232. ? @"image"
  233. : (asset.mediaType == PHAssetMediaTypeAudio
  234. ? @"audio"
  235. : @"unknown")));
  236. CLLocation *const loc = asset.location;
  237. // A note on isStored: in the previous code that used ALAssets, isStored
  238. // was always set to YES, probably because iCloud-synced images were never returned (?).
  239. // To get the "isStored" information and filename, we would need to actually request the
  240. // image data from the image manager. Those operations could get really expensive and
  241. // would definitely utilize the disk too much.
  242. // Thus, this field is actually not reliable.
  243. // Note that Android also does not return the `isStored` field at all.
  244. [assets addObject:@{
  245. @"node": @{
  246. @"type": assetMediaTypeLabel, // TODO: switch to mimeType?
  247. @"group_name": [assetCollection localizedTitle],
  248. @"image": @{
  249. @"uri": uri,
  250. @"height": @([asset pixelHeight]),
  251. @"width": @([asset pixelWidth]),
  252. @"isStored": @YES, // this field doesn't seem to exist on android
  253. @"playableDuration": @([asset duration]) // fractional seconds
  254. },
  255. @"timestamp": @(asset.creationDate.timeIntervalSince1970),
  256. @"location": (loc ? @{
  257. @"latitude": @(loc.coordinate.latitude),
  258. @"longitude": @(loc.coordinate.longitude),
  259. @"altitude": @(loc.altitude),
  260. @"heading": @(loc.course),
  261. @"speed": @(loc.speed), // speed in m/s
  262. } : @{})
  263. }
  264. }];
  265. }];
  266. }];
  267. // If we get this far and haven't resolved the promise yet, we reached the end of the list of photos
  268. if (!resolvedPromise) {
  269. hasNextPage = NO;
  270. RCTResolvePromise(resolve, assets, hasNextPage);
  271. resolvedPromise = YES;
  272. }
  273. });
  274. }
  275. RCT_EXPORT_METHOD(deletePhotos:(NSArray<NSString *>*)assets
  276. resolve:(RCTPromiseResolveBlock)resolve
  277. reject:(RCTPromiseRejectBlock)reject)
  278. {
  279. NSArray<NSURL *> *assets_ = [RCTConvert NSURLArray:assets];
  280. [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
  281. PHFetchResult<PHAsset *> *fetched =
  282. [PHAsset fetchAssetsWithALAssetURLs:assets_ options:nil];
  283. [PHAssetChangeRequest deleteAssets:fetched];
  284. }
  285. completionHandler:^(BOOL success, NSError *error) {
  286. if (success == YES) {
  287. resolve(@(success));
  288. }
  289. else {
  290. reject(@"Couldn't delete", @"Couldn't delete assets", error);
  291. }
  292. }
  293. ];
  294. }
// Dev-only sanity check: without NSPhotoLibraryUsageDescription in Info.plist,
// any photo-library access would terminate the app at runtime, so surface a
// red-box error early. Compiled out of release builds.
static void checkPhotoLibraryConfig()
{
#if RCT_DEV
  if (![[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSPhotoLibraryUsageDescription"]) {
    RCTLogError(@"NSPhotoLibraryUsageDescription key must be present in Info.plist to use camera roll.");
  }
#endif
}
// TurboModule plumbing: returns the codegen'd C++ spec wrapper that bridges
// JS calls into this module's exported methods.
- (std::shared_ptr<facebook::react::TurboModule>)
    getTurboModuleWithJsInvoker:(std::shared_ptr<facebook::react::CallInvoker>)jsInvoker
                  nativeInvoker:(std::shared_ptr<facebook::react::CallInvoker>)nativeInvoker
                     perfLogger:(id<RCTTurboModulePerformanceLogger>)perfLogger
{
  return std::make_shared<facebook::react::NativeCameraRollManagerSpecJSI>(self, jsInvoker, nativeInvoker, perfLogger);
}
  310. @end
  311. Class RCTCameraRollManagerCls(void) {
  312. return RCTCameraRollManager.class;
  313. }