diff --git a/.github/workflows/tests_e2e.yml b/.github/workflows/tests_e2e.yml index 122429cfb3..7042a05965 100644 --- a/.github/workflows/tests_e2e.yml +++ b/.github/workflows/tests_e2e.yml @@ -107,10 +107,10 @@ jobs: timeout_minutes: 10 retry_wait_seconds: 60 max_attempts: 3 - command: echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install "system-images;android-28;google_apis;x86_64" + command: echo "y" | $ANDROID_HOME/tools/bin/sdkmanager --install "system-images;android-30;google_apis;x86_64" - name: Create Emulator - run: echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd --force --name TestingAVD --device "Nexus 5X" -k 'system-images;android-28;google_apis;x86_64' -g google_apis + run: echo "no" | $ANDROID_HOME/tools/bin/avdmanager create avd --force --name TestingAVD --device "Nexus 5X" -k 'system-images;android-30;google_apis;x86_64' -g google_apis # These Emulator start steps are the current best practice to do retries on multi-line commands with persistent (nohup) processes - name: Start Android Emulator diff --git a/.gitignore b/.gitignore index 3de0732a0b..7da02a3c46 100644 --- a/.gitignore +++ b/.gitignore @@ -559,6 +559,7 @@ app.admob.js app.smartreply.js eslint-report.json yarn.lock +spelling.json # Gatsby / Website website/.cache diff --git a/.spellcheck.dict.txt b/.spellcheck.dict.txt index fbeff0e235..3e07999a58 100644 --- a/.spellcheck.dict.txt +++ b/.spellcheck.dict.txt @@ -66,6 +66,8 @@ launchProperties learnt Lerna MDX +MLKit +mlkit mono-repo Multidex multidex @@ -96,6 +98,7 @@ PRs PubSub qa react-native-firebase +react-native-mlkit realtime Realtime remarketing diff --git a/README.md b/README.md index eec2c13649..01501b6089 100644 --- a/README.md +++ b/README.md @@ -54,8 +54,7 @@ The main package that you interface with is `App` (`@react-native-firebase/app`) | [Dynamic Links](/packages/dynamic-links) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/dynamic-links.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/dynamic-links) | | [In-app Messaging](/packages/in-app-messaging) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/in-app-messaging.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/in-app-messaging) | | [Instance ID](/packages/iid) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/iid.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/iid) | -| [ML Kit Natural Language](/packages/ml-natural-language) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml-natural-language.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml-natural-language) | -| [ML Kit Vision](/packages/ml-vision) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml-vision.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml-vision) | +| [ML](/packages/ml) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml) | | [Performance Monitoring](/packages/perf) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/perf.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/perf) | | [Realtime Database](/packages/database) | 
[![badge](https://img.shields.io/npm/dm/@react-native-firebase/database.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/database) | | [Remote Config](/packages/remote-config) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/remote-config.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/remote-config) | diff --git a/docs/app/usage.md b/docs/app/usage.md index 4a915a4512..78db960528 100644 --- a/docs/app/usage.md +++ b/docs/app/usage.md @@ -27,8 +27,7 @@ Currently, the native Firebase SDKs only provide functionality for creating seco - [Cloud Functions](/functions) - [Cloud Storage](/storage). - [Instance ID](/iid). -- [ML Kit Natural Language](/ml-language). -- [ML Kit Vision](/ml-vision). +- [ML](/ml). - [Remote Config](/remote-config). ## Initializing secondary apps diff --git a/docs/in-app-messaging/usage/index.md b/docs/in-app-messaging/usage/index.md index d7afd80c69..659c7cb06d 100644 --- a/docs/in-app-messaging/usage/index.md +++ b/docs/in-app-messaging/usage/index.md @@ -2,7 +2,7 @@ title: In App Messaging description: Installation and getting started with In App Messaging. icon: //static.invertase.io/assets/firebase/in-app-messaging.svg -next: /ml-natural-language/usage +next: /ml/usage previous: /iid/usage --- diff --git a/docs/index.md b/docs/index.md index e9462d1638..cd8632037b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -186,15 +186,15 @@ project.ext { // Overriding Build/Android SDK Versions android : [ minSdk : 16, - targetSdk : 29, - compileSdk: 29, - buildTools: "29.0.3" + targetSdk : 30, + compileSdk: 30, + buildTools: "30.0.2" ], // Overriding Library SDK Versions firebase: [ // Override Firebase SDK Version - bom : "25.12.0" + bom : "26.0.0" ], ], ]) @@ -209,7 +209,7 @@ Open your projects `/ios/Podfile` and add any of the globals shown below to the ```ruby # Override Firebase SDK Version -$FirebaseSDKVersion = '6.34.0' +$FirebaseSDKVersion = '7.0.0' ``` Once changed, reinstall your projects pods via pod install and rebuild your project with `npx react-native run-ios`. diff --git a/docs/migrating-to-v6.md b/docs/migrating-to-v6.md index d17d1567d7..05a41de489 100644 --- a/docs/migrating-to-v6.md +++ b/docs/migrating-to-v6.md @@ -26,7 +26,7 @@ been approved before being released. We have also ensured the release is compatible with some of the popular tooling in the React Native community, such as [autolinking](https://github.com/react-native-community/cli/blob/master/docs/autolinking.md) & [TypeScript](https://facebook.github.io/react-native/blog/2018/05/07/using-typescript-with-react-native). -Version 6 also brings support for previously unsupported modules such as [Firebase ML Kit](https://firebase.google.com/docs/ml-kit). +Version 6 also brings support for previously unsupported modules such as [Firebase ML](https://firebase.google.com/docs/ml). 
## NPM dependency changes @@ -238,26 +238,25 @@ yarn add @react-native-firebase/auth Install the modules required for your application: -| Module | NPM Package | -| ------------------------------------------------------------ | ------------------------------------------ | -| AdMob | @react-native-firebase/admob | -| Analytics | @react-native-firebase/analytics | -| App | @react-native-firebase/app | -| App Invites | @react-native-firebase/invites | -| Authentication | @react-native-firebase/auth | -| Cloud Firestore | @react-native-firebase/firestore | -| Cloud Functions | @react-native-firebase/functions | -| Cloud Messaging | @react-native-firebase/messaging | -| Cloud Storage | @react-native-firebase/storage | -| Crashlytics | @react-native-firebase/crashlytics | -| Dynamic Links | @react-native-firebase/dynamic-links | -| In-app Messaging | @react-native-firebase/in-app-messaging | -| Instance ID | @react-native-firebase/iid | -| ML Kit Natural Language | @react-native-firebase/ml-natural-language | -| ML Kit Vision | @react-native-firebase/ml-vision | -| Performance Monitoring | @react-native-firebase/perf | -| Realtime Database | @react-native-firebase/database | -| Remote Config | @react-native-firebase/remote-config | +| Module | NPM Package | +| ------------------------------------------------------------ | --------------------------------------- | +| AdMob | @react-native-firebase/admob | +| Analytics | @react-native-firebase/analytics | +| App | @react-native-firebase/app | +| App Invites | @react-native-firebase/invites | +| Authentication | @react-native-firebase/auth | +| Cloud Firestore | @react-native-firebase/firestore | +| Cloud Functions | @react-native-firebase/functions | +| Cloud Messaging | @react-native-firebase/messaging | +| Cloud Storage | @react-native-firebase/storage | +| Crashlytics | @react-native-firebase/crashlytics | +| Dynamic Links | @react-native-firebase/dynamic-links | +| In-app Messaging | @react-native-firebase/in-app-messaging | +| Instance ID | @react-native-firebase/iid | +| ML | @react-native-firebase/ml | +| Performance Monitoring | @react-native-firebase/perf | +| Realtime Database | @react-native-firebase/database | +| Remote Config | @react-native-firebase/remote-config | Users on React Native version 0.60+, the modules will be automatically linked. For users on a lower version, see the module specific pages for manual installation guides. @@ -394,9 +393,9 @@ No breaking changes. ### Notifications -Device-local notification APIs are not actually Firebase APIs at the same time they are very difficult to maintain. +Device-local notification APIs are not actually Firebase APIs; at the same time, they are very difficult to maintain. -For these reasons the notifications package has been removed from react-native-firebase for versions 6 and higher. +For these reasons, the notifications package has been removed from react-native-firebase for versions 6 and higher. How to migrate: If you use device-local notification APIs and user-visible notifications in your app you will want to integrate a separate library that gives you access to device-local notification APIs.
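As an illustration only, assuming you adopt Notifee (one of the community libraries mentioned below), a minimal device-local notification sketch might look like this; the package and its APIs belong to Notifee rather than React Native Firebase, so check that library's documentation before relying on it:

```js
// Hedged sketch: assumes @notifee/react-native is installed and linked.
import notifee from '@notifee/react-native';

async function showLocalNotification() {
  // Android requires a notification channel before anything can be displayed
  const channelId = await notifee.createChannel({
    id: 'default',
    name: 'Default Channel',
  });

  await notifee.displayNotification({
    title: 'Chat',
    body: 'You have a new message',
    android: { channelId },
  });
}
```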
Many people have reported success with each of https://notifee.app, https://wix.github.io/react-native-notifications and https://github.com/zo0r/react-native-push-notification @@ -454,14 +453,8 @@ How to migrate: If you use device-local notification APIs and user-visible notif - `firebase.utils.Native` is now deprecated and will be removed in a later release, please rename usages of this to `firebase.utils.FilePath`. - `firebase.utils.Native.*` some properties have been renamed and deprecated and will be removed in a later release, follow the in-app console warnings on how to migrate. -### ML Kit Natural Language +### ML -`@react-native-firebase/ml-natural-language` - -This is a new module. See documentation for usage. - -### ML Kit Vision - -`@react-native-firebase/ml-vision` +`@react-native-firebase/ml` This is a new module. See documentation for usage. diff --git a/docs/ml-natural-language/index.md b/docs/ml-natural-language/index.md deleted file mode 100644 index 144141eb6d..0000000000 --- a/docs/ml-natural-language/index.md +++ /dev/null @@ -1,3 +0,0 @@ ---- -redirect: /ml-natural-language/usage ---- diff --git a/docs/ml-natural-language/usage/index.md b/docs/ml-natural-language/usage/index.md deleted file mode 100644 index 303b24cc1a..0000000000 --- a/docs/ml-natural-language/usage/index.md +++ /dev/null @@ -1,154 +0,0 @@ ---- -title: ML Natural Language -description: Installation and getting started with ML Natural Language. -icon: //static.invertase.io/assets/firebase/ml-kit.svg -next: /ml-vision/usage -previous: /in-app-messaging/usage ---- - -# Installation - -This module requires that the `@react-native-firebase/app` module is already setup and installed. To install the "app" module, view the -[Getting Started](/) documentation. - -```bash -# Install & setup the app module -yarn add @react-native-firebase/app - -# Install the ml-natural-language module -yarn add @react-native-firebase/ml-natural-language - -# If you're developing your app using iOS, run this command -cd ios/ && pod install -``` - -If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project, -you can follow the manual installation steps for [iOS](/ml-natural-language/usage/installation/ios) and [Android](/ml-natural-language/usage/installation/android). - -# What does it do - -The React Native Firebase ML Natural Language module supports [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies) -& [Language Identification](https://firebase.google.com/docs/ml-kit/identify-languages) provided by Firebase ML kit. -At this moment, the [Translation](https://firebase.google.com/docs/ml-kit/translation) module is not supported - - - -Smart reply can automatically generate relevant replies to messages. It helps your users respond to messages quickly, -and makes it easier to reply to messages on devices with limited input capabilities. - -Language identification can be used to determine the language of a string of text. It can be useful when working with -user-provided text, which often doesn't come with any language information. - -# Usage - -Each services requires enabling before it can be used within your app. The sections below show how to enable the models -for each service and usage examples of each. - -## Smart Replies - -The [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies) service from Firebase allows you to -generate suggested replies based on a list of on-going conversation data. 
- -Before using the API, the Smart Reply model must be installed on your device. To enable installation of the model, set -the `ml_natural_language_smart_reply_model` to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_natural_language_smart_reply_model": true - } -} -``` - -Once added, rebuild your application: - -```bash -// For Android -npx react-native run-android - -// For iOS -cd ios/ && pod install -npx react-native run-ios -``` - -Once complete, the `suggestReplies` method allows you to generate potential replies by providing it with an array of text input(s) -which may generate three responses per input as example below: - -```jsx -const replies = await firebase - .naturalLanguage() - .suggestReplies([ - { text: 'Hey, long time no speak!' }, - { text: 'I know right, it has been a while..', userId: '123', isLocalUser: false }, - { text: 'We should catchup some time!' }, - { text: 'Definitely, how about we go for lunch this week?', userId: '123', isLocalUser: false }, - ]); - -replies.forEach(reply => { - console.log(reply.text); -}); -``` - -Each array item an is an instance of a [`TextMessage`](/reference/ml-natural-language/textmessage). At a minimum you -must provide the a `text` property. To help the Machine Learning service identify various users in the conversation, you -can set the `isLocalUser` flag to `false` if the message is from an external user, along with a unique ID. - -Once returned, if the service is able to generate suggested replies you can iterate over the response to extract the `text` -property from the returned [`SuggestedReply`](/reference/ml-natural-language/suggestedreply) instance. - -## Identify language - -The [Language Identification](https://firebase.google.com/docs/ml-kit/identify-languages) service from Firebase allows you to -identify a language from any given string of text. - -Before using the API, the Language Identification model must be installed on your device. To enable installation of the model, set -the `ml_natural_language_language_id_model` to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_natural_language_language_id_model": true - } -} -``` - -Once added, rebuild your application: - -```bash -// For Android -npx react-native run-android - -// For iOS -cd ios/ && pod install -npx react-native run-ios -``` - -The `identifyLanguage` method allows then allows you to identify a language, for example: - -```jsx -const language = await firebase.naturalLanguage().identifyLanguage('Hello there. General Kenobi.'); - -console.log('Identified language: ', language); // en -``` - -# firebase.json - -Add any of the keys indicated below to your `firebase.json` file at the root of your project directory, and set them to -true to enable them. All models and APIs are disabled (false) by default. - -> If you are manually linking on iOS (e.g. not using CocoaPods) then it's up to you to manage these models and dependencies -> yourself - firebase.json support is only for Android and iOS (via Pods). 
- -```json -// /firebase.json -{ - "react-native": { - // Language Identification - "ml_natural_language_language_id_model": true, - // Smart Replies - "ml_natural_language_smart_reply_model": true - } -} -``` diff --git a/docs/ml-natural-language/usage/installation/android.md b/docs/ml-natural-language/usage/installation/android.md deleted file mode 100644 index e8defa3b8e..0000000000 --- a/docs/ml-natural-language/usage/installation/android.md +++ /dev/null @@ -1,59 +0,0 @@ ---- -title: Android Installation -description: Manually integrate ML Kit Natural Language into your Android application. -next: /ml-natural-language/usage/installation/ios -previous: /ml-natural-language/usage ---- - -# Android Manual Installation - -The following steps are only required if your environment does not have access to React Native -auto-linking. - -## 1. Update Gradle Settings - -Add the following to your projects `/android/settings.gradle` file: - -```groovy -include ':@react-native-firebase_ml-natural-language' -project(':@react-native-firebase_ml-natural-language').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml-natural-language/android') -``` - -## 2. Update Gradle Dependencies - -Add the React Native Functions module dependency to your `/android/app/build.gradle` file: - -```groovy -// .. -dependencies { - // .. - implementation project(path: ":@react-native-firebase_ml-natural-language") -} -``` - -## 3. Add package to the Android Application - -Import and apply the React Native Firebase module package to your `/android/app/src/main/java/**/MainApplication.java` file: - -Import the package: - -```java -import io.invertase.firebase.perf.ReactNativeFirebaseMLNaturalLanguagePackage; -``` - -Add the package to the registry: - -```java -protected List getPackages() { - return Arrays.asList( - new MainReactPackage(), - new ReactNativeFirebaseMLNaturalLanguagePackage(), -``` - -## 4. Rebuild the project - -Once the above steps have been completed, rebuild your Android project: - -```bash -npx react-native run-android -``` diff --git a/docs/ml-natural-language/usage/installation/ios.md b/docs/ml-natural-language/usage/installation/ios.md deleted file mode 100644 index 5defdecaf5..0000000000 --- a/docs/ml-natural-language/usage/installation/ios.md +++ /dev/null @@ -1,37 +0,0 @@ ---- -title: iOS Installation -description: Manually integrate ML Kit Natural Language APIs into your iOS application. -next: /ml-natural-language/usage/installation/android -previous: /ml-natural-language/usage ---- - -# iOS Manual Installation - -The following steps are only required if your environment does not have access to React Native -auto-linking. - -## 1. Add the Pod - -Add the `RNFBMLNaturalLanguage` Pod to your projects `/ios/Podfile`: - -```ruby -target 'app' do - # ... - pod 'RNFBMLNaturalLanguage', :path => '../node_modules/@react-native-firebase/ml-natural-language' -end -``` - -## 2. 
Update Pods & rebuild the project - -You may need to update your local Pods in order for the `RNFBMLNaturalLanguage` Pod to be installed in your project: - -```bash -$ cd ios/ -$ pod install --repo-update -``` - -Once the Pods have installed locally, rebuild your iOS project: - -```bash -npx react-native run-ios -``` diff --git a/docs/ml-vision/barcode-scanning.md b/docs/ml-vision/barcode-scanning.md deleted file mode 100644 index 30864b4498..0000000000 --- a/docs/ml-vision/barcode-scanning.md +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Barcode Scanning -description: Get started with ML Kit Vision Barcode Scanning. -next: /ml-vision/image-labeling -previous: /ml-vision/landmark-recognition ---- - -Barcode scanning can read data encoded using most standard barcode formats. Barcode scanning happens on the device, -and doesn't require a network connection. It's a convenient way to pass information from the real world to your app. - -The Machine Learning service is only offered on the device, and no cloud service exists. - -Given an image file, the Barcode Scanning service will attempt to recognize one or more barcodes, offering information -such as: - -- The 4-point coordinates of the barcodes on the image. -- The type of barcode (e.g. a phone number, contact information, calendar invite etc). - -To view the full list of information available, view the [`VisionBarcode`](/reference/ml-vision/visionbarcode) documentation. - -# On-device Barcode Scanning - -## Enable the model - -To enable the mode, set the `ml_vision_barcode_model` key to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_vision_barcode_model": true - } -} -``` - -Once complete, rebuild your application: - -```bash -# For Android -npx react-native run-android - -# For iOS -cd ios/ && pod install --repo-update -npx react-native run-ios -``` - -## Process - -Once the model has been downloaded, call the `barcodeDetectorProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - -async function processBarcodes(localPath) { - const barcodes = await vision().barcodeDetectorProcessImage(localPath); - - barcodes.forEach(barcode => { - if (barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) { - console.log('Barcode is a calendar event: ', barcode.calendarEvent); - } - - if (barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - console.log('Barcode contains contact info: ', barcode.contactInfo); - } - - if (barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) { - console.log('Barcode contains drivers license info: ', barcode.driverLicense); - } - - if (barcode.valueType === VisionBarcodeValueType.EMAIL) { - console.log('Barcode contains email address info: ', barcode.email); - } - - if (barcode.valueType === VisionBarcodeValueType.GEO) { - console.log('Barcode contains location info: ', barcode.geoPoint); - } - - if (barcode.valueType === VisionBarcodeValueType.PHONE) { - console.log('Barcode contains phone number info: ', barcode.phone); - } - - if (barcode.valueType === VisionBarcodeValueType.SMS) { - console.log('Barcode contains SMS info: ', barcode.sms); - } - - if (barcode.valueType === VisionBarcodeValueType.URL) { - console.log('Barcode contains URL info: ', barcode.url); - } - - if (barcode.valueType === VisionBarcodeValueType.WIFI) { - console.log('Barcode contains WIFI info: ', barcode.wifi); - } - }); -} 
- -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/barcode-document.jpg`; - -processBarcodes(localFile).then(() => console.log('Finished processing file.')); -``` - -To learn about the types of information the barcode scanner can return, view the -[`VisionBarcode`](/reference/ml-vision/visionbarcode) documentation. diff --git a/docs/ml-vision/face-detection.md b/docs/ml-vision/face-detection.md deleted file mode 100644 index f6a3a8e057..0000000000 --- a/docs/ml-vision/face-detection.md +++ /dev/null @@ -1,83 +0,0 @@ ---- -title: Face Detection -description: Get started with ML Kit Vision Face Detection. -next: /remote-config/usage -previous: /ml-vision/face-detection ---- - -Face detection can detect faces in an image, identify key facial features, and get the contours of detected faces. -This provides information needed to perform tasks like embellishing selfies and portraits, or generating avatars -from a user's photo. - -The Machine Learning service is only offered on the device, and no cloud service exists. - -Given an image file, the Face Detection service will attempt to recognize one or more faces, offering information -such as: - -- Face contour coordinates. -- The rotation of the head/face along the Y & Z axis. -- The probability that the face has it's left/right eyes open. -- The probability that the face is smiling. -- A list of face features (e.g. eyes, nose, mouth etc) and their positions on the face. - -# On-device Face Detection - -## Enable the model - -To enable the mode, set the `ml_vision_face_model` key to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_vision_face_model": true - } -} -``` - -Once complete, rebuild your application: - -```bash -# For Android -npx react-native run-android - -# For iOS -cd ios/ && pod install --repo-update -npx react-native run-ios -``` - -## Process - -Once the model has been downloaded, call the `faceDetectorProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision, { VisionFaceContourType } from '@react-native-firebase/ml-vision'; - -async function processFaces(localPath) { - const faces = await vision().faceDetectorProcessImage(localPath); - - faces.forEach(face => { - console.log('Head rotation on Y axis: ', face.headEulerAngleY); - console.log('Head rotation on Z axis: ', face.headEulerAngleZ); - - console.log('Left eye open probability: ', face.leftEyeOpenProbability); - console.log('Right eye open probability: ', face.rightEyeOpenProbability); - console.log('Smiling probability: ', face.smilingProbability); - - face.faceContours.forEach(contour => { - if (contour.type === VisionFaceContourType.FACE) { - console.log('Face outline points: ', contour.points); - } - }); - }); -} - -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/barcode-document.jpg`; - -processBarcodes(localFile).then(() => console.log('Finished processing file.')); -``` - -To learn about the types of information the face detector can return, view the -[`VisionFace`](/reference/ml-vision/visionface) documentation. diff --git a/docs/ml-vision/image-labeling.md b/docs/ml-vision/image-labeling.md deleted file mode 100644 index c4f194bead..0000000000 --- a/docs/ml-vision/image-labeling.md +++ /dev/null @@ -1,106 +0,0 @@ ---- -title: Image Labeling -description: Get started with ML Kit Vision Image Labeling. 
-next: /ml-vision/face-detection -previous: /ml-vision/barcode-scanning ---- - -Image labeling can recognize entities in an image without having to provide any additional contextual metadata, using -either an on-device API or a cloud-based API. It gets a list of the entities that were recognized: people, things, places, -activities, and so on. - -# Cloud Image Labeling - -The cloud based image labeling service uploads a given image to the Firebase services, processes the results and returns them. -To get started, call the `cloudImageLabelerProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision from '@react-native-firebase/ml-vision'; - -async function processImage(localPath) { - const labels = await vision().cloudImageLabelerProcessImage(localPath); - - labels.forEach(label => { - console.log('Service labelled the image: ', label.text); - console.log('Confidence in the label: ', label.confidence); - }); -} - -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`; - -processImage(localFile).then(() => console.log('Finished processing file.')); -``` - -To learn more about the available data on a processed document, view the [`VisionImageLabel`](/reference/ml-vision/visionimagelabel) -documentation. - -## Configuration - -By default, the service will return labels with any confidence level, which may include labels you do not care about or -are too obvious. Set the `confidenceThreshold` key to a value between 0 & 1, where 1 represents 100% confidence. The -cloud service will only return labels with a confidence greater than what you specified: - -```js -const processed = await vision().cloudDocumentTextRecognizerProcessImage(localPath, { - // 80% or higher confidence labels only - confidenceThreshold: 0.8, -}); -``` - -View the [`VisionCloudImageLabelerOptions`](/reference/ml-vision/visioncloudimagelabeleroptions) documentation for more information. - -# On-device Image Labeling - -Running the ML Kit service on a device requires the `ml_vision_image_label_model` and `ml_vision_label_model` to be download to the device. Although the results -of on-device processing will be faster and more accurate, including the model in your application will increase the size -of the application. 
- -## Enable the model - -To enable the mode, set the `ml_vision_image_label_model` & `ml_vision_label_model` key to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_vision_image_label_model": true, - "ml_vision_label_model": true - } -} -``` - -Once complete, rebuild your application: - -```bash -# For Android -npx react-native run-android - -# For iOS -cd ios/ && pod install --repo-update -npx react-native run-ios -``` - -## Process - -Once the models have been downloaded, call the `imageLabelerProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision from '@react-native-firebase/ml-vision'; - -async function processImage(localPath) { - const labels = await vision().imageLabelerProcessImage(localPath); - - labels.forEach(label => { - console.log('Service labelled the image: ', label.text); - console.log('Confidence in the label: ', label.confidence); - }); -} - -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`; - -processImage(localFile).then(() => console.log('Finished processing file.')); -``` diff --git a/docs/ml-vision/index.md b/docs/ml-vision/index.md deleted file mode 100644 index 1260a10acb..0000000000 --- a/docs/ml-vision/index.md +++ /dev/null @@ -1,3 +0,0 @@ ---- -redirect: /ml-vision/usage ---- diff --git a/docs/ml-vision/text-recognition.md b/docs/ml-vision/text-recognition.md deleted file mode 100644 index 33244983f4..0000000000 --- a/docs/ml-vision/text-recognition.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -title: Text Recognition -description: Get started with ML Kit Vision Text Recognition. -next: /ml-vision/landmark-recognition -previous: /ml-vision/usage ---- - -Text recognition can automate tedious data entry for credit cards, receipts, and business cards. With the Cloud-based API, -you can also extract text from pictures of documents, which you can use to increase accessibility or translate documents. - -Once an image file has been processed, the API returns a [`VisionDocumentText`](/reference/ml-vision/visiondocumenttext), referencing -all found text along with each [`VisionDocumentTextBlock`](/reference/ml-vision/visiondocumenttextblock). Each block contains -meta-data such as: - -- The 4-point coordinates of the box on the document. -- Paragraphs within the block. -- Recognized languages within the block/document. -- The confidence the Machine Learning service has in it's own results. - -# Cloud Text Recognition - -The cloud based text recognition service uploads a given image of a document to the remote Firebase service which processes the results and returns them. Only image file types are allowed. 
-To get started, call the `cloudDocumentTextRecognizerProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision from '@react-native-firebase/ml-vision'; - -async function processDocument(localPath) { - const processed = await vision().cloudDocumentTextRecognizerProcessImage(localPath); - - console.log('Found text in document: ', processed.text); - - processed.blocks.forEach(block => { - console.log('Found block with text: ', block.text); - console.log('Confidence in block: ', block.confidence); - console.log('Languages found in block: ', block.recognizedLanguages); - }); -} - -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`; - -processDocument(localFile).then(() => console.log('Finished processing file.')); -``` - -To learn more about the available data on a processed document, view the [`VisionDocumentText`](/reference/ml-vision/visiondocumenttext) -documentation. - -## Configuration - -To help improve the results when using the cloud service, you can optionally provide arguments to the `cloudDocumentTextRecognizerProcessImage` -method: - -```js -const processed = await vision().cloudDocumentTextRecognizerProcessImage(documentPath, { - // The document contains Kurdish - languageHints: ['KU'], -}); -``` - -In most scenarios, not providing any hints will yield better results. Use this configuration if the cloud service is struggling -to detect a language. - -View the [`VisionCloudDocumentTextRecognizerOptions`](/reference/ml-vision/visionclouddocumenttextrecognizeroptions) documentation for more information. - -# On-device Text Recognition - -Running the ML Kit service on a device requires the `ml_vision_ocr_model` to be download to the device. Although the results -of on-device processing will be faster and more accurate, including the model in your application will increase the size -of the application. - -## Enable the model - -To enable the mode, set the `ml_vision_ocr_model` key to `true` in your `firebase.json` file: - -```json -// /firebase.json -{ - "react-native": { - "ml_vision_ocr_model": true - } -} -``` - -Once complete, rebuild your application: - -```bash -# For Android -npx react-native run-android - -# For iOS -cd ios/ && pod install --repo-update -npx react-native run-ios -``` - -## Process - -Once the model has been downloaded, call the `textRecognizerProcessImage` method with a path to a local file on your device: - -```js -import { utils } from '@react-native-firebase/app'; -import vision from '@react-native-firebase/ml-vision'; - -async function processDocument(localPath) { - const processed = await vision().textRecognizerProcessImage(localPath); - - console.log('Found text in document: ', processed.text); - - processed.blocks.forEach(block => { - console.log('Found block with text: ', block.text); - console.log('Confidence in block: ', block.confidence); - console.log('Languages found in block: ', block.recognizedLanguages); - }); -} - -// Local path to file on the device -const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`; - -processDocument(localFile).then(() => console.log('Finished processing file.')); -``` diff --git a/docs/ml-vision/usage/index.md b/docs/ml-vision/usage/index.md deleted file mode 100644 index fc9034fc59..0000000000 --- a/docs/ml-vision/usage/index.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -title: ML Kit Vision -description: Installation and getting started with ML Kit Vision. 
-icon: //static.invertase.io/assets/firebase/ml-kit.svg -next: /ml-vision/text-recognition -previous: /ml-natural-language/usage ---- - -# Installation - -This module requires that the `@react-native-firebase/app` module is already setup and installed. To install the "app" module, view the -[Getting Started](/) documentation. - -```bash -# Install & setup the app module -yarn add @react-native-firebase/app - -# Install the ml-vision module -yarn add @react-native-firebase/ml-vision - -# If you're developing your app using iOS, run this command -cd ios/ && pod install -``` - -If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project, -you can follow the manual installation steps for [iOS](/ml-vision/usage/installation/ios) and [Android](/ml-vision/usage/installation/android). - -# What does it do - -ML Kit Vision makes use of Firebase's Machine Learning Kit's [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text), -[Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces), [Barcode Scanning](https://firebase.google.com/docs/ml-kit/read-barcodes), -[Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) & [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) features. - -Depending on the service, it is possible to perform Machine Learning on both the local device or cloud. - - - -## Support table - -The table below outlines the current module support for each available service, and whether they are available on local device, -cloud or both. - -| API | Cloud Model | On Device | -| ------------------------------------------------------------------------------------- | ----------- | --------- | -| [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) | ✅ | ✅ | -| [Document Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text)) | ✅ | | -| [Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces) | | ✅ | -| [Barcode Scanning](https://firebase.google.com/docs/ml-kit/read-barcodes) | | ✅ | -| [Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) | ✅ | ✅ | -| [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) | | ✅ | -| [AutoML Vision Edge](https://firebase.google.com/docs/ml-kit/automl-image-labeling) | ❌ | ❌ | -| [Object Detection/Tracking](https://firebase.google.com/docs/ml-kit/object-detection) | ❌ | ❌ | - -# Usage - -To get started, you can find the documentation for the individual ML Kit Vision services below: - -- [Text Recognition](/ml-vision/text-recognition). -- [Landmark Recognition](/ml-vision/landmark-recognition). -- [Barcode Scanning](/ml-vision/barcode-scanning). -- [Image ](/ml-vision/image-labeling). -- [Face Detection](/ml-vision/face-detection). - -# firebase.json - -## Enabling models - -To be able to use the on-device Machine Learning models you'll need to enable them. This is possible by setting the below noted properties -on the `firebase.json` file at the root of your project directory. - -```json -// /firebase.json -{ - "react-native": { - // on device face detection - "ml_vision_face_model": true, - // on device text recognition - "ml_vision_ocr_model": true, - // on device barcode detection - "ml_vision_barcode_model": true, - - // on device image labeling - "ml_vision_label_model": true, - "ml_vision_image_label_model": true - } -} -``` - -The models are disabled by default to help control app size. 
- -Since only models enabled here will be compiled into the application, any changes to this file require a rebuild. - -```bash -# For Android -npx react-native run-android - -# For iOS -cd ios/ && pod install --repo-update -npx react-native run-ios -``` diff --git a/docs/ml/image-labeling.md b/docs/ml/image-labeling.md new file mode 100644 index 0000000000..4b91322b01 --- /dev/null +++ b/docs/ml/image-labeling.md @@ -0,0 +1,52 @@ +--- +title: Image Labeling +description: Get started with ML Image Labeling. +next: /remote-config/usage +previous: /ml/landmark-recognition +--- + +Image labeling can recognize entities in an image without having to provide any additional contextual metadata, using +a cloud-based API. It gets a list of the entities that were recognized: people, things, places, +activities, and so on. + +# Cloud Image Labeling + +The cloud-based image labeling service uploads a given image to the Firebase services, processes the results and returns them. +To get started, call the `cloudImageLabelerProcessImage` method with a path to a local file on your device: + +```js +import { utils } from '@react-native-firebase/app'; +import ml from '@react-native-firebase/ml'; + +async function processImage(localPath) { + const labels = await ml().cloudImageLabelerProcessImage(localPath); + + labels.forEach(label => { + console.log('Service labelled the image: ', label.text); + console.log('Confidence in the label: ', label.confidence); + }); +} + +// Local path to file on the device +const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/image-document.jpg`; + +processImage(localFile).then(() => console.log('Finished processing file.')); +``` + +To learn more about the available data on a processed document, view the [`MLImageLabel`](/reference/ml/mlimagelabel) +documentation. + +## Configuration + +By default, the service will return labels with any confidence level, which may include labels you do not care about or +are too obvious. Set the `confidenceThreshold` key to a value between 0 & 1, where 1 represents 100% confidence. The +cloud service will only return labels with a confidence greater than what you specified: + +```js +const processed = await ml().cloudImageLabelerProcessImage(localPath, { + // 80% or higher confidence labels only + confidenceThreshold: 0.8, +}); +``` + +View the [`MLCloudImageLabelerOptions`](/reference/ml/mlcloudimagelabeleroptions) documentation for more information. diff --git a/docs/ml/index.md b/docs/ml/index.md new file mode 100644 index 0000000000..b9fa9915d1 --- /dev/null +++ b/docs/ml/index.md @@ -0,0 +1,3 @@ +--- +redirect: /ml/usage +--- diff --git a/docs/ml-vision/landmark-recognition.md b/docs/ml/landmark-recognition.md similarity index 68% rename from docs/ml-vision/landmark-recognition.md rename to docs/ml/landmark-recognition.md index cb8d1131b1..569f2c8d90 100644 --- a/docs/ml-vision/landmark-recognition.md +++ b/docs/ml/landmark-recognition.md @@ -1,15 +1,13 @@ --- title: Landmark Recognition -description: Get started with ML Kit Vision Landmark Recognition. -next: /ml-vision/barcode-scanning -previous: /ml-vision/text-recognition +description: Get started with ML Landmark Recognition. +next: /ml/image-labeling +previous: /ml/text-recognition --- Landmark recognition can recognize well-known landmarks in an image. It returns the landmarks that were recognized, along with each landmark's geographic coordinates and the region of the image the landmark was found.
-The Machine Learning service is only offered as a cloud based one, and no on-device service exists. - Given an image file, the Landmark Recognition service will attempt to recognize one or more landmarks, offering information such as: @@ -25,13 +23,13 @@ To get started, call the `cloudLandmarkRecognizerProcessImage` method with a pat ```js import { utils } from '@react-native-firebase/app'; -import vision from '@react-native-firebase/ml-vision'; +import ml from '@react-native-firebase/ml'; async function processLandmarks(localPath) { - const landmarks = await vision().cloudLandmarkRecognizerProcessImage(localPath); + const landmarks = await ml().cloudLandmarkRecognizerProcessImage(localPath); - landmarks.forEach(visionLandmark => { - console.log('Landmark name: ', visionLandmark.landmark); - console.log('Landmark locations: ', block.locations); - console.log('Confidence score: ', block.confidence); + landmarks.forEach(landmark => { + console.log('Landmark name: ', landmark.landmark); + console.log('Landmark locations: ', landmark.locations); + console.log('Confidence score: ', landmark.confidence); }); @@ -49,17 +47,17 @@ To help speed up requests and improve results, the `cloudLandmarkRecognizerProce configuration object. ```js -import vision, { VisionCloudLandmarkRecognizerModelType } from '@react-native-firebase/ml-vision'; +import ml, { MLCloudLandmarkRecognizerModelType } from '@react-native-firebase/ml'; -const landmarks = await vision().cloudLandmarkRecognizerProcessImage(localPath, { +const landmarks = await ml().cloudLandmarkRecognizerProcessImage(localPath, { // Limit the results maxResults: 2, // Set the model type - modelType: VisionCloudLandmarkRecognizerModelType.LATEST_MODEL, + modelType: MLCloudLandmarkRecognizerModelType.LATEST_MODEL, }); ``` By default, the service will use a stable model to detect landmarks. However, if you feel results are not up-to-date, you can optionally use the latest model available. Results however may change unexpectedly. -View the [`VisionCloudLandmarkRecognizerOptions`](/reference/ml-vision/visioncloudlandmarkrecognizeroptions) documentation for more information. +View the [`MLCloudLandmarkRecognizerOptions`](/reference/ml/mlcloudlandmarkrecognizeroptions) documentation for more information. diff --git a/docs/ml/text-recognition.md b/docs/ml/text-recognition.md new file mode 100644 index 0000000000..638f1832a0 --- /dev/null +++ b/docs/ml/text-recognition.md @@ -0,0 +1,66 @@ +--- +title: Text Recognition +description: Get started with ML Text Recognition. +next: /ml/landmark-recognition +previous: /ml/usage +--- + +Text recognition can automate tedious data entry for credit cards, receipts, and business cards. With the Cloud-based API, +you can also extract text from pictures of documents, which you can use to increase accessibility or translate documents. + +Once an image file has been processed, the API returns an [`MLDocumentText`](/reference/ml/mldocumenttext), referencing +all found text along with each [`MLDocumentTextBlock`](/reference/ml/mldocumenttextblock). Each block contains +meta-data such as: + +- The 4-point coordinates of the box on the document. +- Paragraphs within the block. +- Recognized languages within the block/document. +- The confidence the Machine Learning service has in its own results. + +# Cloud Text Recognition + +The cloud-based text recognition service uploads a given image of a document to the remote Firebase service, which processes the results and returns them. Only image file types are allowed.
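Because only image file types are accepted, it can be worth rejecting other files before making the network call. The sketch below is a hedged illustration; the extension list and the `looksLikeImage` helper are assumptions for this example, not part of the module's API, and the recognizer method itself is introduced just below:

```js
// Hedged sketch: pre-checks a local path before calling the cloud recognizer.
// The accepted-extension list is an illustrative assumption, not an official contract.
import ml from '@react-native-firebase/ml';

const IMAGE_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.bmp', '.gif'];

function looksLikeImage(localPath) {
  const lower = localPath.toLowerCase();
  return IMAGE_EXTENSIONS.some(ext => lower.endsWith(ext));
}

async function safeProcessDocument(localPath) {
  if (!looksLikeImage(localPath)) {
    throw new Error(`Not an image file: ${localPath}`);
  }
  return ml().cloudDocumentTextRecognizerProcessImage(localPath);
}
```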
+To get started, call the `cloudDocumentTextRecognizerProcessImage` method with a path to a local file on your device: + +```js +import { utils } from '@react-native-firebase/app'; +import ml from '@react-native-firebase/ml'; + +async function processDocument(localPath) { + const processed = await ml().cloudDocumentTextRecognizerProcessImage(localPath); + + console.log('Found text in document: ', processed.text); + + processed.blocks.forEach(block => { + console.log('Found block with text: ', block.text); + console.log('Confidence in block: ', block.confidence); + console.log('Languages found in block: ', block.recognizedLanguages); + }); +} + +// Local path to file on the device +const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/text-document.jpg`; + +processDocument(localFile).then(() => console.log('Finished processing file.')); +``` + +To learn more about the available data on a processed document, view the [`MLDocumentText`](/reference/ml/mldocumenttext) +documentation. + +## Configuration + +To help improve the results when using the cloud service, you can optionally provide arguments to the `cloudDocumentTextRecognizerProcessImage` +method: + +```js +const processed = await ml().cloudDocumentTextRecognizerProcessImage(documentPath, { + // The document contains Kurdish + languageHints: ['KU'], +}); +``` + +In most scenarios, not providing any hints will yield better results. Use this configuration if the cloud service is struggling +to detect a language. + +View the [`MLCloudDocumentTextRecognizerOptions`](/reference/ml/mlclouddocumenttextrecognizeroptions) documentation for more information. + diff --git a/docs/ml/usage/index.md b/docs/ml/usage/index.md new file mode 100644 index 0000000000..34c250d60e --- /dev/null +++ b/docs/ml/usage/index.md @@ -0,0 +1,56 @@ +--- +title: ML +description: Installation and getting started with ML. +icon: //static.invertase.io/assets/firebase/ml-kit.svg +next: /ml/text-recognition +previous: /in-app-messaging/usage +--- + +# Installation + +This module requires that the `@react-native-firebase/app` module is already set up and installed. To install the "app" module, view the +[Getting Started](/) documentation. + +```bash +# Install & setup the app module +yarn add @react-native-firebase/app + +# Install the ml module +yarn add @react-native-firebase/ml + +# If you're developing your app using iOS, run this command +cd ios/ && pod install +``` + +If you're using an older version of React Native without autolinking support, or wish to integrate into an existing project, +you can follow the manual installation steps for [iOS](/ml/usage/installation/ios) and [Android](/ml/usage/installation/android). + +# What does it do + +ML makes use of Firebase Machine Learning's [Text Recognition](https://firebase.google.com/docs/ml/recognize-text), +[Image Labeling](https://firebase.google.com/docs/ml/label-images) & [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks) features.
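Every service in the module follows the same call pattern: obtain the module instance with `ml()` and pass a local file path to one of its `cloud*ProcessImage` methods. A minimal sketch, assuming a photo already exists at the path below:

```js
import { utils } from '@react-native-firebase/app';
import ml from '@react-native-firebase/ml';

async function quickStart() {
  // Assumed example path; any readable image file on the device works
  const localFile = `${utils.FilePath.PICTURES_DIRECTORY}/receipt.jpg`;

  // Each service is a single asynchronous call against a cloud model
  const processed = await ml().cloudDocumentTextRecognizerProcessImage(localFile);
  console.log('Recognized text: ', processed.text);
}

quickStart().then(() => console.log('Finished processing file.'));
```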
+ +All Firebase ML services are cloud-based; on-device ML APIs are now handled by the new, separate [Google MLKit](https://developers.google.com/ml-kit/) SDK, usable in React Native +as a set of [react-native-mlkit modules](https://www.npmjs.com/org/react-native-mlkit). + + + +## Support table + +The table below outlines the current module support status for each available service. + +| API | Cloud Model | +| --------------------------------------------------------------------------------- | ----------- | +| [Text Recognition](https://firebase.google.com/docs/ml/recognize-text) | ✅ | +| [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text) | ✅ | +| [Image Labeling](https://firebase.google.com/docs/ml/label-images) | ✅ | +| [AutoML Vision Edge](https://firebase.google.com/docs/ml/automl-image-labeling) | ❌ | +| [Object Detection/Tracking](https://firebase.google.com/docs/ml/object-detection) | ❌ | + +# Usage + +To get started, you can find the documentation for the individual ML services below: + +- [Text Recognition](/ml/text-recognition) +- [Landmark Recognition](/ml/landmark-recognition) +- [Image Labeling](/ml/image-labeling) diff --git a/docs/ml-vision/usage/installation/android.md b/docs/ml/usage/installation/android.md similarity index 64% rename from docs/ml-vision/usage/installation/android.md rename to docs/ml/usage/installation/android.md index 433a46ba98..f3ff62890d 100644 --- a/docs/ml-vision/usage/installation/android.md +++ b/docs/ml/usage/installation/android.md @@ -1,8 +1,8 @@ --- title: Android Installation -description: Manually integrate ML Kit Vision into your Android application. -next: /ml-vision/usage/installation/ios -previous: /ml-vision/usage +description: Manually integrate ML into your Android application. +next: /ml/usage/installation/ios +previous: /ml/usage --- # Android Manual Installation @@ -15,8 +15,8 @@ auto-linking. Add the following to your projects `/android/settings.gradle` file: ```groovy -include ':@react-native-firebase_ml-vision' -project(':@react-native-firebase_ml-vision').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml-vision/android') +include ':@react-native-firebase_ml' +project(':@react-native-firebase_ml').projectDir = new File(rootProject.projectDir, './../node_modules/@react-native-firebase/ml/android') ``` ## 2. Update Gradle Dependencies @@ -27,7 +27,7 @@ Add the React Native Functions module dependency to your `/android/app/build.gra // .. dependencies { // .. - implementation project(path: ":@react-native-firebase_ml-vision") + implementation project(path: ":@react-native-firebase_ml") } ``` @@ -38,7 +38,7 @@ Import and apply the React Native Firebase module package to your `/android/app/ Import the package: ```java -import io.invertase.firebase.perf.ReactNativeFirebaseMLVisionPackage; +import io.invertase.firebase.perf.ReactNativeFirebaseMLPackage; ``` Add the package to the registry: @@ -47,7 +47,7 @@ Add the package to the registry: protected List getPackages() { return Arrays.asList( new MainReactPackage(), - new ReactNativeFirebaseMLVisionPackage(), + new ReactNativeFirebaseMLPackage(), ``` ## 4.
Rebuild the project diff --git a/docs/ml-vision/usage/installation/ios.md b/docs/ml/usage/installation/ios.md similarity index 51% rename from docs/ml-vision/usage/installation/ios.md rename to docs/ml/usage/installation/ios.md index 119ef6f3e7..c11f45cef6 100644 --- a/docs/ml-vision/usage/installation/ios.md +++ b/docs/ml/usage/installation/ios.md @@ -1,8 +1,8 @@ --- title: iOS Installation -description: Manually integrate ML Kit Vision APIs into your iOS application. -next: /ml-vision/usage/installation/android -previous: /ml-vision/usage +description: Manually integrate ML APIs into your iOS application. +next: /ml/usage/installation/android +previous: /ml/usage --- # iOS Manual Installation @@ -12,18 +12,18 @@ auto-linking. ## 1. Add the Pod -Add the `RNFBMLVision` Pod to your projects `/ios/Podfile`: +Add the `RNFBML` Pod to your projects `/ios/Podfile`: ```ruby target 'app' do # ... - pod 'RNFBMLVision', :path => '../node_modules/@react-native-firebase/ml-vision' + pod 'RNFBML', :path => '../node_modules/@react-native-firebase/ml' end ``` ## 2. Update Pods & rebuild the project -You may need to update your local Pods in order for the `RNFBMLVision` Pod to be installed in your project: +You may need to update your local Pods in order for the `RNFBML` Pod to be installed in your project: ```bash $ cd /ios/ diff --git a/docs/releases/index.md b/docs/releases/index.md index e0e8ba2f00..6cdbbd55cf 100644 --- a/docs/releases/index.md +++ b/docs/releases/index.md @@ -21,8 +21,7 @@ Starting with version `v6.5.0`; all React Native Firebase packages are now indep | Dynamic Links | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/dynamic-links.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/dynamic-links/CHANGELOG.md) | | In-app Messaging | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/in-app-messaging.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/in-app-messaging/CHANGELOG.md) | | Instance ID | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/iid.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/iid/CHANGELOG.md) | -| ML Kit Natural Language | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/ml-natural-language.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml-natural-language/CHANGELOG.md) | -| ML Kit Vision | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/ml-vision.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml-vision/CHANGELOG.md) | +| ML | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/ml.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/ml/CHANGELOG.md) | | Performance Monitoring | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/perf.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/perf/CHANGELOG.md) | | Realtime Database | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/database.svg?style=for-the-badge&logo=npm) | [View Release Notes 
»](https://github.com/invertase/react-native-firebase/tree/master/packages/database/CHANGELOG.md) | | Remote Config | ![hide:badge](https://img.shields.io/npm/v/@react-native-firebase/remote-config.svg?style=for-the-badge&logo=npm) | [View Release Notes »](https://github.com/invertase/react-native-firebase/tree/master/packages/remote-config/CHANGELOG.md) | diff --git a/docs/releases/v6.0.0.md b/docs/releases/v6.0.0.md index f92fe30008..2c6bff3a21 100644 --- a/docs/releases/v6.0.0.md +++ b/docs/releases/v6.0.0.md @@ -26,8 +26,7 @@ The new modules: | [Dynamic Links](/dynamic-links) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/dynamic-links.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/dynamic-links) | [![badge](https://api.rnfirebase.io/coverage/dynamic-links/badge)](https://api.rnfirebase.io/coverage/dynamic-links/detail) | | [In-app Messaging](/in-app-messaging) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/in-app-messaging.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/in-app-messaging) | [![badge](https://api.rnfirebase.io/coverage/in-app-messaging/badge)](https://api.rnfirebase.io/coverage/in-app-messaging/detail) | | [Instance ID](/iid) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/iid.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/iid) | [![badge](https://api.rnfirebase.io/coverage/iid/badge)](https://api.rnfirebase.io/coverage/iid/detail) | -| [ML Kit Natural Language](/ml-natural-language) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml-natural-language.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml-natural-language) | [![badge](https://api.rnfirebase.io/coverage/ml-natural-language/badge)](https://api.rnfirebase.io/coverage/ml-natural-language/detail) | -| [ML Kit Vision ](/ml-vision) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml-vision.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml-vision) | [![badge](https://api.rnfirebase.io/coverage/ml-vision/badge)](https://api.rnfirebase.io/coverage/ml-vision/detail) | +| [ML](/ml) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/ml.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/ml) | [![badge](https://api.rnfirebase.io/coverage/ml/badge)](https://api.rnfirebase.io/coverage/ml/detail) | | [Performance Monitoring](/perf) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/perf.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/perf) | [![badge](https://api.rnfirebase.io/coverage/perf/badge)](https://api.rnfirebase.io/coverage/perf/detail) | | [Realtime Database](/database) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/database.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/database) | [![badge](https://api.rnfirebase.io/coverage/database/badge)](https://api.rnfirebase.io/coverage/database/detail) | | [Remote Config](/remote-config) | [![badge](https://img.shields.io/npm/dm/@react-native-firebase/remote-config.svg?style=for-the-badge&logo=npm)](https://www.npmjs.com/package/@react-native-firebase/remote-config) | [![badge](https://api.rnfirebase.io/coverage/remote-config/badge)](https://api.rnfirebase.io/coverage/remote-config/detail) | @@ -307,38 +306,17 @@ The Remote 
Config API has had a significant API change as originally highlighted --- -### ML Kit Natural Language (naturalLanguage) +### ML (Machine Learning) > This is a new module in React Native Firebase. - [NEW] Implemented support for language identification APIs - Single Languages: `identifyLanguage()`. - Multiple Languages: `identifyPossibleLanguages()` -- [NEW] Implemented support for [Smart Replies](https://firebase.google.com/docs/ml-kit/generate-smart-replies) - - [Example Video](https://twitter.com/mikediarmid/status/1128837402481635331) - -> ML Kit Translate APIs to come in a later release. - ---- - -### ML Kit Vision (vision) - -> This is a new module in React Native Firebase. - -- [NEW] Implemented support for [Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) Vision APIs; - - [x] Cloud - - [x] On Device -- [NEW] Implemented support for [Document Text Recognition](https://firebase.google.com/docs/ml-kit/recognize-text) Vision APIs; - - [x] Cloud -- [NEW] Implemented support for [Face Detection](https://firebase.google.com/docs/ml-kit/detect-faces) Vision APIs; - - [x] On Device -- [NEW] Implemented support for [Barcode Detection](https://firebase.google.com/docs/ml-kit/read-barcodes) Vision APIs; - - [x] On Device -- [NEW] Implemented support for [Image Labeling](https://firebase.google.com/docs/ml-kit/label-images) Vision APIs; - - [x] Cloud - - [x] On Device -- [NEW] Implemented support for [Landmark Recognition](https://firebase.google.com/docs/ml-kit/recognize-landmarks) Vision APIs; - - [x] Cloud +- [NEW] Implemented support for [Text Recognition](https://firebase.google.com/docs/ml/recognize-text) Vision APIs; +- [NEW] Implemented support for [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text) Vision APIs; +- [NEW] Implemented support for [Image Labeling](https://firebase.google.com/docs/ml/label-images) Vision APIs; +- [NEW] Implemented support for [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks) Vision APIs; --- diff --git a/docs/remote-config/usage/index.md b/docs/remote-config/usage/index.md index fbc993a61f..420960d285 100644 --- a/docs/remote-config/usage/index.md +++ b/docs/remote-config/usage/index.md @@ -3,7 +3,7 @@ title: Remote Config description: Installation and getting started with Remote Config. 
icon: //static.invertase.io/assets/firebase/remote-config.svg next: /perf/usage -previous: /ml-vision/face-detection +previous: /ml/image-labeling --- # Installation @@ -85,8 +85,10 @@ remoteConfig() .then(fetchedRemotely => { if (fetchedRemotely) { console.log('Configs were retrieved from the backend and activated.'); - } else { - console.log('No configs were fetched from the backend, and the local configs were already activated'); + } else { + console.log( + 'No configs were fetched from the backend, and the local configs were already activated', + ); } }); ``` @@ -121,11 +123,11 @@ The API also provides a `getAll` method to read all parameters at once rather th ```js const parameters = remoteConfig().getAll(); -Object.entries(parameters).forEach(($) => { +Object.entries(parameters).forEach($ => { const [key, entry] = $; - console.log('Key: ', key); - console.log('Source: ', entry.getSource()); - console.log('Value: ', entry.asString()); + console.log('Key: ', key); + console.log('Source: ', entry.getSource()); + console.log('Value: ', entry.asString()); }); ``` diff --git a/docs/sidebar.yaml b/docs/sidebar.yaml index dbb20064fc..7f33964a1e 100644 --- a/docs/sidebar.yaml +++ b/docs/sidebar.yaml @@ -103,23 +103,15 @@ - - - Usage - '/in-app-messaging/usage' - '//static.invertase.io/assets/firebase/in-app-messaging.svg' -- - ML Kit Natural Language +- - ML - - - Usage - - '/ml-natural-language/usage' - - '//static.invertase.io/assets/firebase/ml-kit.svg' -- - ML Kit Vision - - - - Usage - - '/ml-vision/usage' + - '/ml/usage' - - Text Recognition - - '/ml-vision/text-recognition' + - '/ml/text-recognition' - - Landmark Recognition - - '/ml-vision/landmark-recognition' - - - Barcode Scanning - - '/ml-vision/barcode-scanning' + - '/ml/landmark-recognition' - - Image Labeling - - '/ml-vision/image-labeling' - - - Face Detection - - '/ml-vision/face-detection' + - '/ml/image-labeling' - '//static.invertase.io/assets/firebase/ml-kit.svg' - - Remote Config - - - Usage diff --git a/packages/app/android/build.gradle b/packages/app/android/build.gradle index dc1e5cadc1..04a224f8dc 100644 --- a/packages/app/android/build.gradle +++ b/packages/app/android/build.gradle @@ -11,7 +11,7 @@ buildscript { } dependencies { - classpath("com.android.tools.build:gradle:4.0.1") + classpath("com.android.tools.build:gradle:4.1.0") } } } diff --git a/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java b/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java index 83f13be50a..cf6e909b90 100644 --- a/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java +++ b/packages/app/android/src/reactnative/java/io/invertase/firebase/utils/ReactNativeFirebaseUtilsModule.java @@ -161,8 +161,7 @@ public Map getConstants() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { - File folder = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS); - constants.put(KEY_DOCUMENT_DIRECTORY, folder.getAbsolutePath()); + constants.put(KEY_DOCUMENT_DIRECTORY, context.getExternalFilesDir(null).getAbsolutePath()); } else { constants.put(KEY_DOCUMENT_DIRECTORY, context.getFilesDir().getAbsolutePath()); } diff --git a/packages/app/lib/internal/constants.js b/packages/app/lib/internal/constants.js index 2db91325ec..3badd9d796 100644 --- a/packages/app/lib/internal/constants.js +++ b/packages/app/lib/internal/constants.js @@ -35,7 +35,7 @@ export const 
KNOWN_NAMESPACES = [ 'dynamicLinks', 'messaging', 'naturalLanguage', - 'vision', + 'ml', 'notifications', 'perf', 'utils', diff --git a/packages/app/package.json b/packages/app/package.json index e689a0e0ba..6f94d571ab 100644 --- a/packages/app/package.json +++ b/packages/app/package.json @@ -62,15 +62,14 @@ }, "sdkVersions": { "ios": { - "firebase": "~> 6.34.0" + "firebase": "~> 7.0.0" }, "android": { "minSdk": 16, - "targetSdk": 29, - "compileSdk": 29, - "buildTools": "29.0.3", - "firebase": "25.12.0", - "iid": "20.3.0", + "targetSdk": 30, + "compileSdk": 30, + "buildTools": "30.0.2", + "firebase": "26.0.0", "playServicesAuth": "18.1.0" } } diff --git a/packages/ml-natural-language/CHANGELOG.md b/packages/ml-natural-language/CHANGELOG.md deleted file mode 100644 index ffc9612fe1..0000000000 --- a/packages/ml-natural-language/CHANGELOG.md +++ /dev/null @@ -1,140 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. - -## [7.4.11](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.10...@react-native-firebase/ml-natural-language@7.4.11) (2020-11-10) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.10](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.9...@react-native-firebase/ml-natural-language@7.4.10) (2020-10-30) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.9](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.8...@react-native-firebase/ml-natural-language@7.4.9) (2020-10-16) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.8](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.7...@react-native-firebase/ml-natural-language@7.4.8) (2020-09-30) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.7](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.6...@react-native-firebase/ml-natural-language@7.4.7) (2020-09-30) - -### Bug Fixes - -- **types:** enable TypeScript libCheck & resolve type conflicts ([#4306](https://github.com/invertase/react-native-firebase/issues/4306)) ([aa8ee8b](https://github.com/invertase/react-native-firebase/commit/aa8ee8b7e83443d2c1664993800e15faf4b59b0e)) - -## [7.4.6](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.5...@react-native-firebase/ml-natural-language@7.4.6) (2020-09-30) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.5](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.4...@react-native-firebase/ml-natural-language@7.4.5) (2020-09-17) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.4](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.3...@react-native-firebase/ml-natural-language@7.4.4) (2020-09-17) - -### Bug Fixes - -- **ios, podspec:** depend on React-Core instead of React ([#4275](https://github.com/invertase/react-native-firebase/issues/4275)) 
([fd1a2be](https://github.com/invertase/react-native-firebase/commit/fd1a2be6b6ab1dec89e5dce1fc237435c3e1d510)) - -## [7.4.3](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.2...@react-native-firebase/ml-natural-language@7.4.3) (2020-09-11) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.1...@react-native-firebase/ml-natural-language@7.4.2) (2020-08-28) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.4.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.4.0...@react-native-firebase/ml-natural-language@7.4.1) (2020-08-26) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -# [7.4.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.2...@react-native-firebase/ml-natural-language@7.4.0) (2020-08-26) - -### Features - -- bump firebase sdk versions, add GoogleApi dep, use Android API29 ([#4122](https://github.com/invertase/react-native-firebase/issues/4122)) ([728f418](https://github.com/invertase/react-native-firebase/commit/728f41863832d21230c6eb1f55385284fef03c09)) - -## [7.3.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.1...@react-native-firebase/ml-natural-language@7.3.2) (2020-08-15) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.3.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.3.0...@react-native-firebase/ml-natural-language@7.3.1) (2020-08-03) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -# [7.3.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.2...@react-native-firebase/ml-natural-language@7.3.0) (2020-08-03) - -### Features - -- use latest android & ios Firebase SDKs version ([#3956](https://github.com/invertase/react-native-firebase/issues/3956)) ([e7b4bb3](https://github.com/invertase/react-native-firebase/commit/e7b4bb31b05985c044b1f01625a43e364bb653ef)) - -## [7.2.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.1...@react-native-firebase/ml-natural-language@7.2.2) (2020-07-09) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.2.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.2.0...@react-native-firebase/ml-natural-language@7.2.1) (2020-07-07) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -# [7.2.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.8...@react-native-firebase/ml-natural-language@7.2.0) (2020-07-07) - -### Features - -- **android,ios:** upgrade native SDK versions ([#3881](https://github.com/invertase/react-native-firebase/issues/3881)) ([6cb68a8](https://github.com/invertase/react-native-firebase/commit/6cb68a8ea808392fac3a28bdb1a76049c7b52e86)) - -## [7.1.8](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.7...@react-native-firebase/ml-natural-language@7.1.8) (2020-07-05) - 
-**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.7](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.6...@react-native-firebase/ml-natural-language@7.1.7) (2020-06-30) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.6](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.5...@react-native-firebase/ml-natural-language@7.1.6) (2020-06-26) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.5](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.4...@react-native-firebase/ml-natural-language@7.1.5) (2020-06-22) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.4](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.3...@react-native-firebase/ml-natural-language@7.1.4) (2020-06-10) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.3](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.2...@react-native-firebase/ml-natural-language@7.1.3) (2020-06-03) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.2](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.1...@react-native-firebase/ml-natural-language@7.1.2) (2020-05-29) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.1.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.1.0...@react-native-firebase/ml-natural-language@7.1.1) (2020-05-29) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -# [7.1.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.1...@react-native-firebase/ml-natural-language@7.1.0) (2020-05-22) - -### Features - -- update native Firebase SDK versions ([#3663](https://github.com/invertase/react-native-firebase/issues/3663)) ([4db9dbc](https://github.com/invertase/react-native-firebase/commit/4db9dbc3ec20bf96de0efad15000f00b41e4a799)) - -## [7.0.1](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.0...@react-native-firebase/ml-natural-language@7.0.1) (2020-05-13) - -**Note:** Version bump only for package @react-native-firebase/ml-natural-language - -## [7.0.0](https://github.com/invertase/react-native-firebase/compare/@react-native-firebase/ml-natural-language@7.0.0...@react-native-firebase/ml-natural-language@7.0.0) (2020-05-13) - -- feat!: all packages should depend on core (#3613) ([252a423](https://github.com/invertase/react-native-firebase/commit/252a4239e98a0f2a55c4afcd2d82e4d5f97e65e9)), closes [#3613](https://github.com/invertase/react-native-firebase/issues/3613) - -### Features - -- **ios:** podspecs now utilize CoreOnly instead of Core ([#3575](https://github.com/invertase/react-native-firebase/issues/3575)) ([35285f1](https://github.com/invertase/react-native-firebase/commit/35285f1655b16d05e6630fc556f95cccfb707ee4)) - -### BREAKING CHANGES - -- breaking change to mark new internal versioning requirements. 
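For context, the JavaScript surface of the package deleted above was small. Below is a minimal sketch of how an app consumed it, assuming the standard React Native Firebase import pattern; the method names and options shape are taken from the release notes and e2e tests elsewhere in this changeset:

```js
// Sketch of the deprecated @react-native-firebase/ml-natural-language usage
// removed by this diff. On-device language ID now lives in the standalone
// Google ML Kit libraries rather than in Firebase.
import { firebase } from '@react-native-firebase/app';
import '@react-native-firebase/ml-natural-language'; // registers firebase.naturalLanguage()

async function detectLanguage(text) {
  // Single best guess: resolves to an ISO 639-1 code such as 'de',
  // or 'und' when no language clears the confidence threshold.
  const language = await firebase.naturalLanguage().identifyLanguage(text, {
    confidenceThreshold: 0.5,
  });

  // All plausible candidates with confidence scores, e.g.
  // [{ language: 'de', confidence: 0.84 }, ...].
  const candidates = await firebase.naturalLanguage().identifyPossibleLanguages(text);

  return { language, candidates };
}
```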
diff --git a/packages/ml-natural-language/README.md b/packages/ml-natural-language/README.md deleted file mode 100644 index eb42e7da45..0000000000 --- a/packages/ml-natural-language/README.md +++ /dev/null @@ -1,31 +0,0 @@ -

-React Native Firebase - ML Kit Natural Language
- ---- - -# DEPRECATED - -This package is deprecated and should no longer be used. - -Google has split mobile machine learning functionality into two pieces: - -1. "On-Device" inferences - this will be handled via the standalone ["Google ML Kit"](https://developers.google.com/ml-kit) libraries, and the related [`react-native-mlkit`](https://github.com/invertase/react-native-mlkit) package. This includes any APIs where the device uses a local model to make inferences - -1. "Cloud" inferences - these will continue in Firebase, but are now in the ["Firebase ML"](https://firebase.google.com/docs/ml) library, and will be available from the new consolidated `@react-native-firebase/ml` package - -More information on the transition is available here: https://firebase.google.com/docs/ml#cloud_vs_on-device - ---- - -
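To make the "Cloud" half of that migration concrete, here is a minimal sketch, assuming the consolidated package keeps the `cloud`-prefixed method names referenced by the new `/ml` docs in this changeset; the file path is a placeholder:

```js
// Before (deprecated @react-native-firebase/ml-vision):
//   const result = await firebase.vision().cloudTextRecognizerProcessImage(localFilePath);

// After: the same Cloud inference through the consolidated package.
import ml from '@react-native-firebase/ml';

async function recognizeText(localFilePath) {
  const processed = await ml().cloudTextRecognizerProcessImage(localFilePath);
  // processed.text holds the full recognised string; processed.blocks carries
  // per-block bounding boxes and line-level detail.
  return processed.text;
}
```

On-device features (face detection, barcode scanning, on-device labeling) have no counterpart in `@react-native-firebase/ml`; as the sidebar changes above show, they move to the standalone Google ML Kit libraries instead.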

-Built and maintained with 💛 by Invertase.
- ---- diff --git a/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec b/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec deleted file mode 100644 index 761e423369..0000000000 --- a/packages/ml-natural-language/RNFBMLNaturalLanguage.podspec +++ /dev/null @@ -1,60 +0,0 @@ -require 'json' -require '../app/firebase_json' -package = JSON.parse(File.read(File.join(__dir__, 'package.json'))) -appPackage = JSON.parse(File.read(File.join('..', 'app', 'package.json'))) - -coreVersionDetected = appPackage['version'] -coreVersionRequired = package['peerDependencies'][appPackage['name']] -firebase_sdk_version = appPackage['sdkVersions']['ios']['firebase'] -if coreVersionDetected != coreVersionRequired - Pod::UI.warn "NPM package '#{package['name']}' depends on '#{appPackage['name']}' v#{coreVersionRequired} but found v#{coreVersionDetected}, this might cause build issues or runtime crashes." -end - -Pod::Spec.new do |s| - s.name = "RNFBMLNaturalLanguage" - s.version = package["version"] - s.description = package["description"] - s.summary = <<-DESC - A well tested feature rich Firebase implementation for React Native, supporting iOS & Android. - DESC - s.homepage = "http://invertase.io/oss/react-native-firebase" - s.license = package['license'] - s.authors = "Invertase Limited" - s.source = { :git => "https://github.com/invertase/react-native-firebase.git", :tag => "v#{s.version}" } - s.social_media_url = 'http://twitter.com/invertaseio' - s.ios.deployment_target = "9.0" - s.source_files = 'ios/**/*.{h,m}' - - # React Native dependencies - s.dependency 'React-Core' - s.dependency 'RNFBApp' - - if defined?($FirebaseSDKVersion) - Pod::UI.puts "#{s.name}: Using user specified Firebase SDK version '#{$FirebaseSDKVersion}'" - firebase_sdk_version = $FirebaseSDKVersion - end - - # Firebase dependencies - s.dependency 'Firebase/MLNaturalLanguage', firebase_sdk_version - - if FirebaseJSON::Config.get_value_or_default('ml_natural_language_language_id_model', false) - s.dependency 'Firebase/MLNLLanguageID', firebase_sdk_version - end - - # ignore until after v6 release, add support in a feature release - # if FirebaseJSON::Config.get_value_or_default('ml_natural_language_translate_model', false) - # s.dependency 'Firebase/MLNLTranslate', firebase_sdk_version - # end - - if FirebaseJSON::Config.get_value_or_default('ml_natural_language_smart_reply_model', false) - s.dependency 'Firebase/MLCommon', firebase_sdk_version - s.dependency 'Firebase/MLNLSmartReply', firebase_sdk_version - end - - if defined?($RNFirebaseAsStaticFramework) - Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'" - s.static_framework = $RNFirebaseAsStaticFramework - else - s.static_framework = false - end -end diff --git a/packages/ml-natural-language/android/build.gradle b/packages/ml-natural-language/android/build.gradle deleted file mode 100644 index 4d50f2f91a..0000000000 --- a/packages/ml-natural-language/android/build.gradle +++ /dev/null @@ -1,105 +0,0 @@ -import io.invertase.gradle.common.PackageJson - -buildscript { - // The Android Gradle plugin is only required when opening the android folder stand-alone. - // This avoids unnecessary downloads and potential conflicts when the library is included as a - // module dependency in an application project. 
- if (project == rootProject) { - repositories { - google() - jcenter() - } - - dependencies { - classpath("com.android.tools.build:gradle:4.0.1") - } - } -} - -plugins { - id "io.invertase.gradle.build" version "1.4" -} - -def appProject -if (findProject(':@react-native-firebase_app')) { - appProject = project(':@react-native-firebase_app') -} else if (findProject(':react-native-firebase_app')) { - appProject = project(':react-native-firebase_app') -} else { - throw new GradleException('Could not find the react-native-firebase/app package, have you installed it?') -} -def packageJson = PackageJson.getForProject(project) -def appPackageJson = PackageJson.getForProject(appProject) -def firebaseBomVersion = appPackageJson['sdkVersions']['android']['firebase'] -def jsonMinSdk = appPackageJson['sdkVersions']['android']['minSdk'] -def jsonTargetSdk = appPackageJson['sdkVersions']['android']['targetSdk'] -def jsonCompileSdk = appPackageJson['sdkVersions']['android']['compileSdk'] -def jsonBuildTools = appPackageJson['sdkVersions']['android']['buildTools'] -def coreVersionDetected = appPackageJson['version'] -def coreVersionRequired = packageJson['peerDependencies'][appPackageJson['name']] -// Only log after build completed so log warning appears at the end -if (coreVersionDetected != coreVersionRequired) { - gradle.buildFinished { - project.logger.warn("ReactNativeFirebase WARNING: NPM package '${packageJson['name']}' depends on '${appPackageJson['name']}' v${coreVersionRequired} but found v${coreVersionDetected}, this might cause build issues or runtime crashes.") - } -} - -project.ext { - set('react-native', [ - versions: [ - android : [ - minSdk : jsonMinSdk, - targetSdk : jsonTargetSdk, - compileSdk: jsonCompileSdk, - // optional as gradle.buildTools comes with one by default - // overriding here though to match the version RN uses - buildTools: jsonBuildTools - ], - - firebase: [ - bom: firebaseBomVersion, - ], - ], - ]) -} - -android { - defaultConfig { - multiDexEnabled true - } - aaptOptions { - noCompress "tflite" - } - lintOptions { - disable 'GradleCompatible' - abortOnError false - } - compileOptions { - sourceCompatibility JavaVersion.VERSION_1_8 - targetCompatibility JavaVersion.VERSION_1_8 - } - - sourceSets { - main { - java.srcDirs = ['src/main/java', 'src/reactnative/java'] - } - } -} - -repositories { - google() - jcenter() -} - -dependencies { - api appProject - implementation platform("com.google.firebase:firebase-bom:${ReactNative.ext.getVersion("firebase", "bom")}") - implementation "com.google.firebase:firebase-ml-natural-language" -} - -apply from: file("./ml-models.gradle") - -ReactNative.shared.applyPackageVersion() -ReactNative.shared.applyDefaultExcludes() -ReactNative.module.applyAndroidVersions() -ReactNative.module.applyReactNativeDependency("api") diff --git a/packages/ml-natural-language/android/ml-models.gradle b/packages/ml-natural-language/android/ml-models.gradle deleted file mode 100644 index f4a3786ccb..0000000000 --- a/packages/ml-natural-language/android/ml-models.gradle +++ /dev/null @@ -1,23 +0,0 @@ -apply from: file("./../../app/android/firebase-json.gradle") - -def mlModels = [ - // TODO not available on iOS until SDK 6.0.0 - // 'ml_natural_language_translate_model', - 'ml_natural_language_language_id_model', - 'ml_natural_language_smart_reply_model', -] - -dependencies { - if (rootProject.ext && rootProject.ext.firebaseJson) { - mlModels.each { modelFlag -> - if (rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) { - 
rootProject.logger.info ":${project.name} model enabled: '${modelFlag}'" - implementation "com.google.firebase:firebase-${modelFlag.replaceAll("_", "-")}" - } else { - rootProject.logger.warn ":${project.name} model disabled: '${modelFlag}'" - } - } - } else { - rootProject.logger.warn ":${project.name} skipping optional models as no firebaseJson extension found, you may be missing a firebase.json file in the root of your React Native project, or you've not installed the @react-native-firebase/app package and included it in your app build." - } -} diff --git a/packages/ml-natural-language/android/settings.gradle b/packages/ml-natural-language/android/settings.gradle deleted file mode 100644 index 9c9c7705f1..0000000000 --- a/packages/ml-natural-language/android/settings.gradle +++ /dev/null @@ -1 +0,0 @@ -rootProject.name = '@react-native-firebase_ml-natural-language' diff --git a/packages/ml-natural-language/android/src/main/AndroidManifest.xml b/packages/ml-natural-language/android/src/main/AndroidManifest.xml deleted file mode 100644 index cc9b0e0efe..0000000000 --- a/packages/ml-natural-language/android/src/main/AndroidManifest.xml +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java deleted file mode 100644 index 60d7c326bb..0000000000 --- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageCommon.java +++ /dev/null @@ -1,96 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -import com.google.firebase.ml.common.FirebaseMLException; - -import javax.annotation.Nullable; - -public class UniversalFirebaseMLNaturalLanguageCommon { - - static String[] getErrorCodeAndMessageFromException(@Nullable Exception possibleMLException) { - String code = "unknown"; - String message = "An unknown error has occurred."; - - if (possibleMLException != null) { - message = possibleMLException.getMessage(); - if (possibleMLException instanceof FirebaseMLException) { - FirebaseMLException mlException = (FirebaseMLException) possibleMLException; - switch (mlException.getCode()) { - case FirebaseMLException.ABORTED: - code = "aborted"; - message = "The operation was aborted, typically due to a concurrency issue like transaction aborts, etc."; - break; - case FirebaseMLException.ALREADY_EXISTS: - code = "already-exists"; - message = "Some resource that we attempted to create already exists."; - break; - case FirebaseMLException.CANCELLED: - code = "cancelled"; - message = "The operation was cancelled (typically by the caller)."; - break; - case FirebaseMLException.DATA_LOSS: - code = "data-loss"; - message = "Unrecoverable data loss or corruption."; - break; - case FirebaseMLException.DEADLINE_EXCEEDED: - code = "deadline-exceeded"; - message = "Deadline expired before operation could complete."; - break; - case FirebaseMLException.FAILED_PRECONDITION: - code = "failed-precondition"; - message = "Operation was rejected because the system is not in a state required for the operation's execution."; - break; - case FirebaseMLException.INTERNAL: - code = "internal"; - message = "Internal errors."; - break; - case FirebaseMLException.INVALID_ARGUMENT: - code = "invalid-argument"; - message = "Client specified an invalid argument."; - break; - case 
FirebaseMLException.MODEL_HASH_MISMATCH: - code = "model-hash-mismatch"; - message = "The downloaded model's hash doesn't match the expected value."; - break; - case FirebaseMLException.MODEL_INCOMPATIBLE_WITH_TFLITE: - code = "model-incompatible-with-tflite"; - message = "The downloaded model isn't compatible with the TFLite runtime."; - break; - case FirebaseMLException.NOT_ENOUGH_SPACE: - code = "not-enough-space"; - message = "There is not enough space left on the device."; - break; - case FirebaseMLException.NOT_FOUND: - code = "not-found"; - message = "Some requested resource was not found."; - break; - case FirebaseMLException.OUT_OF_RANGE: - code = "out-of-range"; - message = "Operation was attempted past the valid range."; - break; - case FirebaseMLException.PERMISSION_DENIED: - code = "permission-denied"; - message = "The caller does not have permission to execute the specified operation."; - break; - case FirebaseMLException.RESOURCE_EXHAUSTED: - code = "resource-exhausted"; - message = "Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system is out of space."; - break; - case FirebaseMLException.UNAUTHENTICATED: - code = "unauthenticated"; - message = "The request does not have valid authentication credentials for the operation."; - break; - case FirebaseMLException.UNAVAILABLE: - code = "unavailable"; - message = "The service is currently unavailable."; - break; - case FirebaseMLException.UNIMPLEMENTED: - code = "unimplemented"; - message = "Operation is not implemented or not supported/enabled."; - break; - } - } - } - - return new String[]{code, message, possibleMLException != null ? possibleMLException.getMessage() : ""}; - } -} diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java deleted file mode 100644 index f87b72236b..0000000000 --- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageIdModule.java +++ /dev/null @@ -1,126 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import android.content.Context; -import android.os.Bundle; - -import com.google.android.gms.tasks.Task; -import com.google.android.gms.tasks.Tasks; -import com.google.firebase.FirebaseApp; -import com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage; -import com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentification; -import com.google.firebase.ml.naturallanguage.languageid.FirebaseLanguageIdentificationOptions; -import com.google.firebase.ml.naturallanguage.languageid.IdentifiedLanguage; -import com.google.firebase.ml.naturallanguage.translate.FirebaseTranslateLanguage; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import io.invertase.firebase.common.UniversalFirebaseModule; - -@SuppressWarnings("WeakerAccess") -class UniversalFirebaseMLNaturalLanguageIdModule extends UniversalFirebaseModule { - - UniversalFirebaseMLNaturalLanguageIdModule(Context context, String serviceName) { - super(context, serviceName); - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyLanguage(java.lang.String) - */ - public Task identifyLanguage( - String appName, - String text, - Bundle identificationOptionsBundle - ) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp); - - FirebaseLanguageIdentificationOptions identificationOptions = getOptions( - identificationOptionsBundle - ); - - FirebaseLanguageIdentification languageIdentification = naturalLanguage.getLanguageIdentification( - identificationOptions); - - return Tasks.await(languageIdentification.identifyLanguage(text)); - }); - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyPossibleLanguages(java.lang.String) - */ - public Task> identifyPossibleLanguages( - String appName, - String text, - Bundle identificationOptionsBundle - ) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp); - FirebaseLanguageIdentificationOptions identificationOptions = getOptions( - identificationOptionsBundle - ); - FirebaseLanguageIdentification languageIdentification = naturalLanguage.getLanguageIdentification( - identificationOptions); - - List languagesRaw = Tasks.await(languageIdentification.identifyPossibleLanguages( - text)); - - List formattedLanguages = new ArrayList<>(languagesRaw.size()); - - - for (IdentifiedLanguage identifiedLanguage : languagesRaw) { - Bundle formattedLanguage = new Bundle(2); - formattedLanguage.putString("language", identifiedLanguage.getLanguageCode()); - formattedLanguage.putFloat("confidence", identifiedLanguage.getConfidence()); - formattedLanguages.add(formattedLanguage); - } - - return formattedLanguages; - }); - - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentificationOptions.html - */ - private FirebaseLanguageIdentificationOptions getOptions( - Bundle identificationOptionsBundle - ) { - boolean multipleLanguages = identificationOptionsBundle.containsKey("multipleLanguages"); - FirebaseLanguageIdentificationOptions.Builder 
optionsBuilder = new FirebaseLanguageIdentificationOptions.Builder(); - - if (identificationOptionsBundle.containsKey("confidenceThreshold")) { - optionsBuilder.setConfidenceThreshold((float) identificationOptionsBundle.getDouble( - "confidenceThreshold")); - } else { - if (!multipleLanguages) { - optionsBuilder.setConfidenceThreshold(FirebaseLanguageIdentification.DEFAULT_IDENTIFY_LANGUAGE_CONFIDENCE_THRESHOLD); - } else { - optionsBuilder.setConfidenceThreshold(FirebaseLanguageIdentification.DEFAULT_IDENTIFY_POSSIBLE_LANGUAGES_CONFIDENCE_THRESHOLD); - } - } - - return optionsBuilder.build(); - } -} diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java deleted file mode 100644 index ff2ecb3dae..0000000000 --- a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageSmartReplyModule.java +++ /dev/null @@ -1,108 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import android.content.Context; -import android.os.Bundle; -import com.google.android.gms.tasks.Task; -import com.google.android.gms.tasks.Tasks; -import com.google.firebase.FirebaseApp; -import com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage; -import com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage; -import com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestion; -import com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult; -import io.invertase.firebase.common.UniversalFirebaseModule; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -@SuppressWarnings({"WeakerAccess", "UnusedReturnValue"}) -class UniversalFirebaseMLNaturalLanguageSmartReplyModule extends UniversalFirebaseModule { - UniversalFirebaseMLNaturalLanguageSmartReplyModule(Context context, String serviceName) { - super(context, serviceName); - } - - @Override - public void onTearDown() { - super.onTearDown(); - } - - @SuppressWarnings("unchecked") - private List buildFirebaseTextMessagesList(List messages) { - List firebaseTextMessages = new ArrayList<>(messages.size()); - - for (Object message : messages) { - Map messageMap = (Map) message; - - Boolean isLocalUser = (Boolean) messageMap.get("isLocalUser"); - long timestamp = (long) ((double) messageMap.get("timestamp")); - String text = (String) messageMap.get("text"); - - if (isLocalUser) { - firebaseTextMessages.add( - FirebaseTextMessage.createForLocalUser( - text, - timestamp - ) - ); - } else { - firebaseTextMessages.add( - FirebaseTextMessage.createForRemoteUser( - text, - timestamp, - (String) messageMap.get("userId") - ) - ); - } - } - - return firebaseTextMessages; - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/smartreply/FirebaseSmartReply.html#public-tasksmartreplysuggestionresultsuggestreplieslistfirebasetextmessage-textmessages - */ - public Task> suggestReplies(String appName, List messages) { - return Tasks.call(getExecutor(), () -> { - List firebaseTextMessages = buildFirebaseTextMessagesList(messages); - FirebaseNaturalLanguage instance = FirebaseNaturalLanguage.getInstance(FirebaseApp.getInstance(appName)); - - SmartReplySuggestionResult suggestionResult = Tasks.await( - instance.getSmartReply().suggestReplies(firebaseTextMessages) - ); - - if (suggestionResult == null) return new ArrayList<>(0); - - List suggestedRepliesListRaw = suggestionResult.getSuggestions(); - List suggestedRepliesListFormatted = new ArrayList<>( - suggestedRepliesListRaw.size()); - - - for (SmartReplySuggestion suggestedReplyRaw : suggestedRepliesListRaw) { - Bundle suggestReplyFormatted = new Bundle(2); - suggestReplyFormatted.putString("text", suggestedReplyRaw.getText()); - // TODO no longer exists - undocumented breaking change - // suggestReplyFormatted.putFloat("confidence", suggestedReplyRaw.getConfidence()); - suggestedRepliesListFormatted.add(suggestReplyFormatted); - } - - return suggestedRepliesListFormatted; - }); - } -} diff --git a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java b/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java deleted file mode 100644 index 5377067c3e..0000000000 --- 
a/packages/ml-natural-language/android/src/main/java/io/invertase/firebase/ml/naturallanguage/UniversalFirebaseMLNaturalLanguageTranslateModule.java +++ /dev/null @@ -1,158 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import android.content.Context; -import io.invertase.firebase.common.UniversalFirebaseModule; - -@SuppressWarnings("WeakerAccess") -class UniversalFirebaseMLNaturalLanguageTranslateModule extends UniversalFirebaseModule { - UniversalFirebaseMLNaturalLanguageTranslateModule(Context context, String serviceName) { - super(context, serviceName); - } - - // TODO not available on iOS until SDK 6.0.0 -// /** -// * @url No reference documentation yet... -// */ -// public Task translate(String appName, String text, Bundle translationOptionsMap) { -// return Tasks.call(getExecutor(), () -> { -// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); -// FirebaseNaturalLanguage naturalLanguage = FirebaseNaturalLanguage.getInstance(firebaseApp); -// FirebaseTranslatorOptions translatorOptions = getOptions(translationOptionsMap); -// FirebaseTranslator translator = naturalLanguage.getTranslator(translatorOptions); -// return Tasks.await(translator.translate(text)); -// }); -// } -// -// /** -// * @url No reference documentation yet... -// */ -// public Task>> modelManagerGetAvailableModels(String appName) { -// return Tasks.call(getExecutor(), () -> { -// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); -// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance(); -// Set modelsRaw = Tasks.await(translateModelManager.getAvailableModels( -// firebaseApp)); -// -// List> modelsArray = new ArrayList<>(modelsRaw.size()); -// for (FirebaseTranslateRemoteModel modelRaw : modelsRaw) { -// Map modelMap = new HashMap<>(); -// modelMap.put("language", modelRaw.getLanguage()); -// modelMap.put("languageCode", modelRaw.getLanguageCode()); -// modelMap.put("backendModelName", modelRaw.getModelNameForBackend()); -// modelMap.put("persistUniqueModelName", modelRaw.getUniqueModelNameForPersist()); -// modelsArray.add(modelMap); -// } -// -// return modelsArray; -// }); -// } -// -// /** -// * @url No reference documentation yet... -// */ -// public Task modelManagerDeleteDownloadedModel(String appName, int language) { -// return Tasks.call(getExecutor(), () -> { -// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); -// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance(); -// FirebaseTranslateRemoteModel model = new FirebaseTranslateRemoteModel.Builder(language) -// .setFirebaseApp(firebaseApp) -// .build(); -// Tasks.await(translateModelManager.deleteDownloadedModel(model)); -// return null; -// }); -// } -// -// /** -// * @url No reference documentation yet... 
-// */ -// public Task modelManagerDownloadRemoteModelIfNeeded( -// String appName, -// int language, -// Bundle downloadConditionsBundle -// ) { -// return Tasks.call(getExecutor(), () -> { -// FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); -// FirebaseTranslateModelManager translateModelManager = FirebaseTranslateModelManager.getInstance(); -// FirebaseModelDownloadConditions downloadConditions = getDownloadConditions( -// downloadConditionsBundle); -// FirebaseTranslateRemoteModel model = new FirebaseTranslateRemoteModel.Builder(language) -// .setDownloadConditions(downloadConditions) -// .setFirebaseApp(firebaseApp) -// .build(); -// Tasks.await(translateModelManager.downloadRemoteModelIfNeeded(model)); -// return null; -// }); -// } -// -// private FirebaseModelDownloadConditions getDownloadConditions(Bundle downloadConditionsBundle) { -// FirebaseModelDownloadConditions.Builder conditionsBuilder = new FirebaseModelDownloadConditions.Builder(); -// -// if (downloadConditionsBundle.containsKey("requireCharging") && downloadConditionsBundle.getBoolean( -// "requireCharging")) { -// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { -// conditionsBuilder.requireCharging(); -// } -// } -// -// if (downloadConditionsBundle.containsKey("requireDeviceIdle") && downloadConditionsBundle.getBoolean( -// "requireDeviceIdle")) { -// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { -// conditionsBuilder.requireDeviceIdle(); -// } -// } -// -// if (downloadConditionsBundle.containsKey("requireWifi") && downloadConditionsBundle.getBoolean( -// "requireWifi")) { -// conditionsBuilder.requireWifi(); -// } -// -// return conditionsBuilder.build(); -// } -// -// private FirebaseTranslatorOptions getOptions(Bundle translationOptionsBundle) { -// FirebaseTranslatorOptions.Builder optionsBuilder = new FirebaseTranslatorOptions.Builder(); -// -// if (translationOptionsBundle.containsKey("sourceLanguage")) { -// optionsBuilder.setSourceLanguage((int) ((double) translationOptionsBundle.get("sourceLanguage"))); -// } else { -// optionsBuilder.setSourceLanguage(FirebaseTranslateLanguage.EN); -// } -// -// if (translationOptionsBundle.containsKey("targetLanguage")) { -// optionsBuilder.setTargetLanguage((int) ((double) translationOptionsBundle.get("targetLanguage"))); -// } else { -// optionsBuilder.setTargetLanguage(FirebaseTranslateLanguage.EN); -// } -// -// return optionsBuilder.build(); -// } -// -// @Override -// public Map getConstants() { -// Map constantsMap = new HashMap<>(); -// Map languagesMap = new HashMap<>(); -// Set languages = FirebaseTranslateLanguage.getAllLanguages(); -// for (Integer language : languages) { -// languagesMap.put(FirebaseTranslateLanguage.languageCodeForLanguage(language), language); -// } -// constantsMap.put("TRANSLATE_LANGUAGES", languagesMap); -// return constantsMap; -// } -} diff --git a/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml b/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml deleted file mode 100644 index cc9b0e0efe..0000000000 --- a/packages/ml-natural-language/android/src/reactnative/AndroidManifest.xml +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java deleted file mode 100644 index 0fdc2d31a7..0000000000 --- 
a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageIdModule.java +++ /dev/null @@ -1,94 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import com.facebook.react.bridge.Arguments; -import com.facebook.react.bridge.Promise; -import com.facebook.react.bridge.ReactApplicationContext; -import com.facebook.react.bridge.ReactMethod; -import com.facebook.react.bridge.ReadableMap; - -import java.util.Objects; - -import io.invertase.firebase.common.ReactNativeFirebaseModule; - -class RNFirebaseMLNaturalLanguageIdModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLNaturalLanguageId"; - private final UniversalFirebaseMLNaturalLanguageIdModule module; - - RNFirebaseMLNaturalLanguageIdModule(ReactApplicationContext reactContext) { - super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLNaturalLanguageIdModule(reactContext, SERVICE_NAME); - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyLanguage(java.lang.String) - */ - @ReactMethod - public void identifyLanguage( - String appName, - String text, - ReadableMap identificationOptionsMap, - Promise promise - ) { - module - .identifyLanguage(appName, text, Arguments.toBundle(identificationOptionsMap)) - .addOnCompleteListener(task -> { - if (task.isSuccessful()) { - promise.resolve(task.getResult()); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/languageid/FirebaseLanguageIdentification.html#identifyPossibleLanguages(java.lang.String) - */ - @ReactMethod - public void identifyPossibleLanguages( - String appName, - String text, - ReadableMap identificationOptionsMap, - Promise promise - ) { - module - .identifyPossibleLanguages(appName, text, Arguments.toBundle(identificationOptionsMap)) - .addOnCompleteListener(task -> { - if (task.isSuccessful()) { - promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult()))); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } -} diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java 
b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java deleted file mode 100644 index 0b3405ce74..0000000000 --- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageSmartReplyModule.java +++ /dev/null @@ -1,65 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import com.facebook.react.bridge.*; -import io.invertase.firebase.common.ReactNativeFirebaseModule; - -import java.util.Objects; - -class RNFirebaseMLNaturalLanguageSmartReplyModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLNaturalLanguageSmartReply"; - private final UniversalFirebaseMLNaturalLanguageSmartReplyModule module; - - RNFirebaseMLNaturalLanguageSmartReplyModule(ReactApplicationContext reactContext) { - super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLNaturalLanguageSmartReplyModule( - reactContext, - SERVICE_NAME - ); - } - - @Override - public void onCatalystInstanceDestroy() { - super.onCatalystInstanceDestroy(); - module.onTearDown(); - } - - /** - * @url https://firebase.google.com/docs/reference/android/com/google/firebase/ml/naturallanguage/smartreply/FirebaseSmartReply.html#public-tasksmartreplysuggestionresultsuggestreplieslistfirebasetextmessage-textmessages - */ - @ReactMethod - public void suggestReplies(String appName, ReadableArray messages, Promise promise) { - module - .suggestReplies(appName, messages.toArrayList()) - .addOnCompleteListener(getExecutor(), task -> { - if (task.isSuccessful()) { - promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult()))); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } -} diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java b/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java deleted file mode 100644 index b81b861a47..0000000000 --- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/RNFirebaseMLNaturalLanguageTranslateModule.java +++ /dev/null @@ -1,137 +0,0 @@ -package io.invertase.firebase.ml.naturallanguage; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import com.facebook.react.bridge.ReactApplicationContext; -import io.invertase.firebase.common.ReactNativeFirebaseModule; - -class RNFirebaseMLNaturalLanguageTranslateModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLNaturalLanguageTranslate"; - private final UniversalFirebaseMLNaturalLanguageTranslateModule module; - - RNFirebaseMLNaturalLanguageTranslateModule(ReactApplicationContext reactContext) { - super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLNaturalLanguageTranslateModule(reactContext, SERVICE_NAME); - } - -// TODO not available on iOS until SDK 6.0.0 - -// /** -// * @url No reference documentation yet... -// */ -// @ReactMethod -// public void translate( -// String appName, -// String text, -// ReadableMap translationOptionsMap, -// Promise promise -// ) { -// module -// .translate(appName, text, Arguments.toBundle(translationOptionsMap)) -// .addOnCompleteListener(task -> { -// if (task.isSuccessful()) { -// promise.resolve(task.getResult()); -// } else { -// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( -// task.getException()); -// rejectPromiseWithCodeAndMessage( -// promise, -// errorCodeAndMessage[0], -// errorCodeAndMessage[1], -// errorCodeAndMessage[2] -// ); -// } -// }); -// } -// -// /** -// * @url No reference documentation yet... -// */ -// @ReactMethod -// public void modelManagerGetAvailableModels(String appName, Promise promise) { -// module.modelManagerGetAvailableModels(appName).addOnCompleteListener(task -> { -// if (task.isSuccessful()) { -// promise.resolve(Arguments.fromList(Objects.requireNonNull(task.getResult()))); -// } else { -// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( -// task.getException()); -// rejectPromiseWithCodeAndMessage( -// promise, -// errorCodeAndMessage[0], -// errorCodeAndMessage[1], -// errorCodeAndMessage[2] -// ); -// } -// }); -// } -// -// /** -// * @url No reference documentation yet... -// */ -// @ReactMethod -// public void modelManagerDeleteDownloadedModel(String appName, int language, Promise promise) { -// module.modelManagerDeleteDownloadedModel(appName, language).addOnCompleteListener(task -> { -// if (task.isSuccessful()) { -// promise.resolve(task.getResult()); -// } else { -// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( -// task.getException()); -// rejectPromiseWithCodeAndMessage( -// promise, -// errorCodeAndMessage[0], -// errorCodeAndMessage[1], -// errorCodeAndMessage[2] -// ); -// } -// }); -// } -// -// /** -// * @url No reference documentation yet... 
-// */ -// @ReactMethod -// public void modelManagerDownloadRemoteModelIfNeeded( -// String appName, -// int language, -// ReadableMap downloadConditionsMap, -// Promise promise -// ) { -// module -// .modelManagerDownloadRemoteModelIfNeeded(appName, language, Arguments.toBundle(downloadConditionsMap)) -// .addOnCompleteListener(task -> { -// if (task.isSuccessful()) { -// promise.resolve(task.getResult()); -// } else { -// String[] errorCodeAndMessage = UniversalFirebaseMLNaturalLanguageCommon.getErrorCodeAndMessageFromException( -// task.getException()); -// rejectPromiseWithCodeAndMessage( -// promise, -// errorCodeAndMessage[0], -// errorCodeAndMessage[1], -// errorCodeAndMessage[2] -// ); -// } -// }); -// } -// -// -// @Override -// public Map getConstants() { -// return module.getConstants(); -// } -} diff --git a/packages/ml-natural-language/e2e/languageId.e2e.js b/packages/ml-natural-language/e2e/languageId.e2e.js deleted file mode 100644 index ae7c8002aa..0000000000 --- a/packages/ml-natural-language/e2e/languageId.e2e.js +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -describe('naturalLanguage() -> Language ID', () => { - describe('identifyLanguage()', () => { - it('returns a string of the identified language', async () => { - const languageDe = await firebase.naturalLanguage().identifyLanguage('Hallo welt'); - const languageEn = await firebase.naturalLanguage().identifyLanguage('Hello world'); - const languageFr = await firebase.naturalLanguage().identifyLanguage('Bonjour le monde'); - should.equal(languageDe, 'de'); - should.equal(languageEn, 'en'); - should.equal(languageFr, 'fr'); - }); - - it('accepts a `confidenceThreshold` option', async () => { - const languageDeDefault = await firebase.naturalLanguage().identifyLanguage('Hallo'); - const languageDeLowConfidence = await firebase.naturalLanguage().identifyLanguage('Hallo', { - confidenceThreshold: 0.2, - }); - should.equal(languageDeDefault, 'und'); - should.equal(languageDeLowConfidence, 'de'); - }); - - it('throws an error if text is not a string', async () => { - try { - firebase.naturalLanguage().identifyLanguage(false); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql('must be a string value'); - return Promise.resolve(); - } - }); - - it('throws an error if options is not an object', async () => { - try { - firebase.naturalLanguage().identifyLanguage('hello', false); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql('must be an object'); - return Promise.resolve(); - } - }); - - it('throws an error if options.confidenceThreshold is not a float value', async () => { - try { - firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: 'boop' }); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql('must be a float value between 0 and 1'); 
- return Promise.resolve(); - } - }); - - it('throws an error if options.confidenceThreshold is greater than 1', async () => { - try { - firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: 1.2 }); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql('must be a float value between 0 and 1'); - return Promise.resolve(); - } - }); - - it('throws an error if options.confidenceThreshold is less than 0', async () => { - try { - firebase.naturalLanguage().identifyLanguage('hello', { confidenceThreshold: -1.2 }); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql('must be a float value between 0 and 1'); - return Promise.resolve(); - } - }); - }); - - describe('identifyPossibleLanguages()', () => { - it('returns an array of the identified languages and their confidence', async () => { - const languages = await firebase.naturalLanguage().identifyPossibleLanguages('hello'); - languages.should.be.an.Array(); - languages.length.should.be.greaterThan(3); - languages[0].language.should.equal('en'); - languages[0].confidence.should.be.a.Number(); - languages[0].confidence.should.be.greaterThan(0.7); - }); - - it('accepts a `confidenceThreshold` option', async () => { - const languages = await firebase.naturalLanguage().identifyPossibleLanguages('hello', { - confidenceThreshold: 0.7, - }); - languages.should.be.an.Array(); - languages.length.should.equal(1); - languages[0].language.should.equal('en'); - languages[0].confidence.should.be.a.Number(); - languages[0].confidence.should.be.greaterThan(0.7); - }); - // arg validation not required, uses same validator as identifyLanguage - }); -}); diff --git a/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js b/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js deleted file mode 100644 index 7a10484e3a..0000000000 --- a/packages/ml-natural-language/e2e/mlKitLanguage.e2e.js +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -describe('naturalLanguage()', () => { - describe('namespace', () => { - it('accessible from firebase.app()', () => { - const app = firebase.app(); - should.exist(app.naturalLanguage); - app.naturalLanguage().app.should.equal(app); - }); - - it('supports multiple apps', async () => { - firebase.naturalLanguage().app.name.should.equal('[DEFAULT]'); - firebase - .naturalLanguage(firebase.app('secondaryFromNative')) - .app.name.should.equal('secondaryFromNative'); - - firebase - .app('secondaryFromNative') - .naturalLanguage() - .app.name.should.equal('secondaryFromNative'); - }); - - it('throws an error if language id native module does not exist', async () => { - const method = firebase.naturalLanguage().native.identifyLanguage; - firebase.naturalLanguage()._nativeModule = Object.assign( - {}, - firebase.naturalLanguage()._nativeModule, - ); - delete firebase.naturalLanguage()._nativeModule.identifyLanguage; - try { - firebase.naturalLanguage().identifyLanguage(); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql( - "You attempted to use an optional API that's not enabled natively", - ); - e.message.should.containEql('Language Identification'); - firebase.naturalLanguage()._nativeModule.identifyLanguage = method; - Object.freeze(firebase.naturalLanguage()._nativeModule); - return Promise.resolve(); - } - }); - - xit('throws an error if smart replies native module does not exist', async () => { - const method = firebase.naturalLanguage().native.getSuggestedReplies; - firebase.naturalLanguage()._nativeModule = Object.assign( - {}, - firebase.naturalLanguage()._nativeModule, - ); - delete firebase.naturalLanguage()._nativeModule.getSuggestedReplies; - try { - firebase.naturalLanguage().newSmartReplyConversation(); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql( - "You attempted to use an optional API that's not enabled natively", - ); - e.message.should.containEql('Smart Replies'); - firebase.naturalLanguage()._nativeModule.getSuggestedReplies = method; - Object.freeze(firebase.naturalLanguage()._nativeModule); - return Promise.resolve(); - } - }); - }); -}); diff --git a/packages/ml-natural-language/e2e/smartReply.e2e.js b/packages/ml-natural-language/e2e/smartReply.e2e.js deleted file mode 100644 index 061d8b6474..0000000000 --- a/packages/ml-natural-language/e2e/smartReply.e2e.js +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -describe('naturalLanguage() -> Smart Replies', () => { - describe('suggestReplies()', () => { - it('throws if messages is not an array', () => { - try { - firebase.naturalLanguage().suggestReplies({}); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'messages' must be an array value"); - return Promise.resolve(); - } - }); - - it('resolves an empty array if empty array if provided', async () => { - const replies = await firebase.naturalLanguage().suggestReplies([]); - replies.should.be.Array(); - replies.length.should.eql(0); - }); - - it('returns suggested replies', async () => { - const replies = await firebase.naturalLanguage().suggestReplies([ - { text: 'We should catchup some time!' }, - { text: 'I know right, it has been a while..', userId: 'invertase', isLocalUser: false }, - { text: 'Lets meet up!' }, - { - text: 'Definitely, how about we go for lunch this week?', - userId: 'invertase', - isLocalUser: false, - }, - ]); - - replies.should.be.Array(); - replies.length.should.equal(3); - - replies.forEach($ => { - $.text.should.be.String(); - $.text.length.should.be.greaterThan(0); - }); - - const replies2 = await firebase - .naturalLanguage() - .suggestReplies([ - { text: replies[0].text }, - { text: 'Great, does Friday work for you?', userId: 'invertase', isLocalUser: false }, - ]); - - replies2[0].text.should.be.String(); - replies2[0].text.length.should.be.greaterThan(0); - }); - - describe('TextMessage', () => { - it('throws if message is not an object', () => { - try { - firebase.naturalLanguage().suggestReplies([123]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage' expected an object value"); - return Promise.resolve(); - } - }); - - describe('.text', () => { - it('throws if text option not provided', () => { - try { - firebase.naturalLanguage().suggestReplies([{}]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.text' expected a string value"); - return Promise.resolve(); - } - }); - - it('throws if text option is not a string', () => { - try { - firebase.naturalLanguage().suggestReplies([{ text: 123 }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.text' expected a string value"); - return Promise.resolve(); - } - }); - - it('throws if text length is zero', () => { - try { - firebase.naturalLanguage().suggestReplies([{ text: '' }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.text' expected string value to not be empty"); - return Promise.resolve(); - } - }); - }); - - describe('.userId', () => { - it('throws if local user true and id provided', () => { - try { - firebase.naturalLanguage().suggestReplies([{ text: 'foo', userId: 'bar' }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql( - "'textMessage.userId' expected 'textMessage.isLocalUser' to be false when setting a user ID", - ); - return Promise.resolve(); - } - }); - - it('throws if text userId not provided', () => { - try { - firebase.naturalLanguage().suggestReplies([{ text: 'foo', isLocalUser: false }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.userId' expected a string value"); - return Promise.resolve(); - } - }); - - it('throws if userId option is not a string', 
() => { - try { - firebase - .naturalLanguage() - .suggestReplies([{ text: 'foo', isLocalUser: false, userId: 123 }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.userId' expected a string value"); - return Promise.resolve(); - } - }); - - it('throws if userId length is zero', () => { - try { - firebase - .naturalLanguage() - .suggestReplies([{ text: 'foo', isLocalUser: false, userId: '' }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql( - "'textMessage.userId' expected string value to not be empty", - ); - return Promise.resolve(); - } - }); - - it('sets a user id', () => { - firebase - .naturalLanguage() - .suggestReplies([{ text: 'foo', isLocalUser: false, userId: 'bar' }]); - }); - }); - - describe('.timestamp', () => { - it('throws if timestamp is not a number', () => { - try { - firebase.naturalLanguage().suggestReplies([{ text: 'foo', timestamp: 'baz' }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.timestamp' expected number value"); - return Promise.resolve(); - } - }); - - it('sets a timestamp', () => { - firebase.naturalLanguage().suggestReplies([{ text: 'foo', timestamp: Date.now() + 123 }]); - }); - }); - - describe('.isLocalUser', () => { - it('throws if isLocalUser is not a boolean', () => { - try { - firebase - .naturalLanguage() - .suggestReplies([{ text: 'foo', userId: 'bar', isLocalUser: 'baz' }]); - return Promise.reject(new Error('Did not throw')); - } catch (e) { - e.message.should.containEql("'textMessage.isLocalUser' expected boolean value"); - return Promise.resolve(); - } - }); - }); - }); - }); -}); diff --git a/packages/ml-natural-language/e2e/translate.e2e.js b/packages/ml-natural-language/e2e/translate.e2e.js deleted file mode 100644 index 71b19bf61f..0000000000 --- a/packages/ml-natural-language/e2e/translate.e2e.js +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -// TODO not available on iOS until SDK 6.0.0 -// xdescribe('naturalLanguage() -> Translate', () => { -// before(async () => { -// await firebase.naturalLanguage().translateModelManager.downloadRemoteModelIfNeeded('de'); -// }); -// -// describe('translate()', () => { -// it('translates test from the specified sourceLanguage to targetLanguage', async () => { -// const translatedText = await firebase -// .naturalLanguage() -// .translate('Hello world', { sourceLanguage: 'en', targetLanguage: 'de' }); -// translatedText.should.equal('Hallo Welt'); -// }); -// }); -// -// describe('translateModelManager()', () => { -// it('returns a new instance of TranslateModelManager', async () => { -// const { translateModelManager } = firebase.naturalLanguage(); -// translateModelManager.should.be.instanceOf( -// jet.require('packages/ml-natural-language/lib/TranslateModelManager'), -// ); -// }); -// }); -// -// describe('TranslateModelManager', () => { -// describe('downloadRemoteModelIfNeeded()', () => { -// it('downloads the specified language model', async () => { -// const { translateModelManager } = firebase.naturalLanguage(); -// await translateModelManager.downloadRemoteModelIfNeeded('de'); -// }); -// }); -// }); -// }); diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h deleted file mode 100644 index b6b6eb1a56..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.h +++ /dev/null @@ -1,27 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import -#import - -@interface RCTConvert (FIRLanguageIdentificationOptions) -#if __has_include() - -+ (FIRLanguageIdentificationOptions *)firLanguageIdOptionsFromDict:(NSDictionary *)options; - -#endif -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m deleted file mode 100644 index 04bab8bdcb..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRLanguageIdentificationOptions.m +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import "RCTConvert+FIRLanguageIdentificationOptions.h" - -@implementation RCTConvert (FIRApp) -#if __has_include() - -+ (FIRLanguageIdentificationOptions *)firLanguageIdOptionsFromDict:(NSDictionary *)options { - if (options[@"confidenceThreshold"] == nil) { - if (options[@"multipleLanguages"] != nil) { - return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:FIRDefaultIdentifyPossibleLanguagesConfidenceThreshold]; - } else { - return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:FIRDefaultIdentifyLanguageConfidenceThreshold]; - } - } - - float confidenceThreshold = [options[@"confidenceThreshold"] floatValue]; - return [[FIRLanguageIdentificationOptions alloc] initWithConfidenceThreshold:confidenceThreshold]; -} - -RCT_CUSTOM_CONVERTER(FIRLanguageIdentificationOptions *, FIRLanguageIdentificationOptions, [self firLanguageIdOptionsFromDict:[self NSDictionary:json]]); -#endif -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h deleted file mode 100644 index 406e0c24b4..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import - -#import - -@interface RCTConvert (FIRTextMessageArray) -#if __has_include() -+ (FIRTextMessage *)FIRTextMessage:(id)json; -#endif -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m deleted file mode 100644 index 3641b77004..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.m +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -#import "RCTConvert+FIRTextMessageArray.h" - -@implementation RCTConvert (FIRTextMessageArray) -#if __has_include() -+ (FIRTextMessage *)FIRTextMessage:(id)json { - NSDictionary *messageDict = [self NSDictionary:json]; - FIRTextMessage *firTextMessage = [ - [FIRTextMessage alloc] - initWithText:messageDict[@"text"] - timestamp:[[messageDict valueForKey:@"timestamp"] doubleValue] - userID:messageDict[@"userId"] ? messageDict[@"userId"] : @"" - isLocalUser:messageDict[@"isLocalUser"] ? YES : NO - ]; - return firTextMessage; -} - -RCT_ARRAY_CONVERTER(FIRTextMessage) -#endif -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m deleted file mode 100644 index 5c129d16a8..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m +++ /dev/null @@ -1,95 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import "RNFBMLNaturalLanguageIdModule.h" -#if __has_include() - -#import -#import "RNFBSharedUtils.h" - - -#define DEPENDENCY_EXISTS=1 -#endif - - -@implementation RNFBMLNaturalLanguageIdModule -#pragma mark - -#pragma mark Module Setup - -RCT_EXPORT_MODULE(); - -#pragma mark - -#pragma mark Firebase Mlkit Language Id Methods - -#ifdef DEPENDENCY_EXISTS - -RCT_EXPORT_METHOD(identifyLanguage: - (FIRApp *) firebaseApp - : (NSString *)text - : (FIRLanguageIdentificationOptions *)identificationOptions - : (RCTPromiseResolveBlock)resolve - : (RCTPromiseRejectBlock)reject -) { - FIRNaturalLanguage *nL = [FIRNaturalLanguage naturalLanguageForApp:firebaseApp]; - FIRLanguageIdentification *languageId = [nL languageIdentificationWithOptions:identificationOptions]; - FIRIdentifyLanguageCallback completion = ^(NSString *_Nullable languageCode, NSError *_Nullable error) { - if (error != nil) { - [self promiseRejectMLKitException:reject error:error]; - } else { - resolve(languageCode); - } - }; - [languageId identifyLanguageForText:text completion:completion]; -} - -RCT_EXPORT_METHOD(identifyPossibleLanguages: - (FIRApp *) firebaseApp - : (NSString *)text - : (FIRLanguageIdentificationOptions *)identificationOptions - : (RCTPromiseResolveBlock)resolve - : (RCTPromiseRejectBlock)reject -) { - FIRNaturalLanguage *nL = [FIRNaturalLanguage naturalLanguageForApp:firebaseApp]; - FIRLanguageIdentification *languageId = [nL languageIdentificationWithOptions:identificationOptions]; - FIRIdentifyPossibleLanguagesCallback completion = ^(NSArray *identifiedLanguages, NSError *error) { - if (error != nil) { - [self promiseRejectMLKitException:reject error:error]; - } else { - NSMutableArray *languages = [[NSMutableArray alloc] initWithCapacity:identifiedLanguages.count]; - for (FIRIdentifiedLanguage *identifiedLanguage in identifiedLanguages) { - [languages addObject:@{ - @"language": identifiedLanguage.languageCode, - @"confidence": 
@(identifiedLanguage.confidence) - }]; - } - resolve(languages); - } - }; - [languageId identifyPossibleLanguagesForText:text completion:completion]; -} - -- (void)promiseRejectMLKitException:(RCTPromiseRejectBlock)reject error:(NSError *)error { - // TODO no way to distinguish between the error codes like Android supports - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": @"unknown", - @"message": [error localizedDescription], - }]; -} - -#endif - -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h deleted file mode 100644 index b6a0510774..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.h +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import - -#import - -@interface RNFBMLNaturalLanguageSmartReplyModule : NSObject - -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m deleted file mode 100644 index 9fb79b90bf..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageSmartReplyModule.m +++ /dev/null @@ -1,81 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -#import "RNFBMLNaturalLanguageSmartReplyModule.h" - -#if __has_include() -#import -#import "RNFBSharedUtils.h" -#define DEPENDENCY_EXISTS=1 -#endif - -@implementation RNFBMLNaturalLanguageSmartReplyModule -#pragma mark - -#pragma mark Module Setup - -RCT_EXPORT_MODULE(); - -#pragma mark - -#pragma mark Firebase Mlkit Smart Reply Methods - -#ifdef DEPENDENCY_EXISTS -RCT_EXPORT_METHOD(suggestReplies: - (FIRApp *) firebaseApp - : (NSArray *)messages - : (RCTPromiseResolveBlock)resolve - : (RCTPromiseRejectBlock)reject -) { - FIRNaturalLanguage *naturalLanguage = [FIRNaturalLanguage naturalLanguage]; - FIRSmartReply *smartReply = [naturalLanguage smartReply]; - - FIRSmartReplyCallback completion = ^( - FIRSmartReplySuggestionResult *_Nullable result, - NSError *_Nullable error - ) { - if (error != nil) { - [self promiseRejectMLKitException:reject error:error]; - return; - } - - if (result.status == FIRSmartReplyResultStatusSuccess) { - NSMutableArray *smartReplies = [[NSMutableArray alloc] initWithCapacity:result.suggestions.count]; - - for (FIRSmartReplySuggestion *suggestion in result.suggestions) { - NSMutableDictionary *smartReplyDict = [NSMutableDictionary dictionary]; - smartReplyDict[@"text"] = suggestion.text; - [smartReplies addObject:smartReplyDict]; - } - - resolve(smartReplies); - } else { - resolve(@[]); - } - }; - - [smartReply suggestRepliesForMessages:messages completion:completion]; -} - -- (void)promiseRejectMLKitException:(RCTPromiseRejectBlock)reject error:(NSError *)error { - // TODO no way to distinguish between the error codes like Android supports - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": @"unknown", - @"message": [error localizedDescription], - }]; -} -#endif - -@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h deleted file mode 100644 index d17b0c0dcb..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.h +++ /dev/null @@ -1,23 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ -// TODO not supported until SDK 6.0.0 -// -//#import -//#import -// -//@interface RNFBMLNaturalLanguageTranslateModule : NSObject -//@end diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m b/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m deleted file mode 100644 index 840d5c22cb..0000000000 --- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageTranslateModule.m +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -// TODO not supported until SDK 6.0.0 -// -// -//#import -//#import "RNFBMLNaturalLanguageTranslateModule.h" -// -// -//@implementation RNFBMLNaturalLanguageTranslateModule -//#pragma mark - -//#pragma mark Module Setup -// -//RCT_EXPORT_MODULE(); -// -//- (dispatch_queue_t)methodQueue { -// return dispatch_get_main_queue(); -//} -// -//#pragma mark - -//#pragma mark Firebase Mlkit Translate Methods -//@end diff --git a/packages/ml-natural-language/lib/TranslateModelManager.js b/packages/ml-natural-language/lib/TranslateModelManager.js deleted file mode 100644 index cd3f731ceb..0000000000 --- a/packages/ml-natural-language/lib/TranslateModelManager.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -// TODO not available on iOS until SDK 6.0.0 -// export default class TranslateModelManager { -// constructor(ml) { -// this.ml = ml; -// } -// -// downloadRemoteModelIfNeeded(language, downloadConditions = {}) { -// // TODO(salakar) arg validation + tests -// // downloadConditions: -// // requireCharging -// // requireDeviceIdle -// // requireDeviceIdle -// const languageId = this.ml.native.TRANSLATE_LANGUAGES[language]; -// return this.ml.native.modelManagerDownloadRemoteModelIfNeeded(languageId, downloadConditions); -// } -// -// // TODO no ios support until SDK v6.0.0 -// deleteDownloadedModel(language) { -// return this.ml.native.modelManagerDeleteDownloadedModel(language); -// } -// -// getAvailableModels() { -// return this.ml.native.modelManagerGetAvailableModels(); -// } -// } diff --git a/packages/ml-natural-language/lib/index.d.ts b/packages/ml-natural-language/lib/index.d.ts deleted file mode 100644 index ef4e4e12e6..0000000000 --- a/packages/ml-natural-language/lib/index.d.ts +++ /dev/null @@ -1,271 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { ReactNativeFirebase } from '@react-native-firebase/app'; - -/** - * Firebase ML Kit package for React Native. - * - * #### Example 1 - * - * Access the firebase export from the `naturalLanguage` package: - * - * ```js - * import { firebase } from '@react-native-firebase/ml-natural-language'; - * - * // firebase.naturalLanguage().X - * ``` - * - * #### Example 2 - * - * Using the default export from the `naturalLanguage` package: - * - * ```js - * import naturalLanguage from '@react-native-firebase/ml-natural-language'; - * - * // naturalLanguage().X - * ``` - * - * #### Example 3 - * - * Using the default export from the `app` package: - * - * ```js - * import firebase from '@react-native-firebase/app'; - * import '@react-native-firebase/ml-natural-language'; - * - * // firebase.naturalLanguage().X - * ``` - * - * @firebase ml-natural-language - */ -export namespace FirebaseLanguageTypes { - import FirebaseModule = ReactNativeFirebase.FirebaseModule; - - // eslint-disable-next-line @typescript-eslint/no-empty-interface - export interface Statics {} - - /** - * An interface representing the language identification options to be used with the - * `identifyLanguage` and `identifyPossibleLanguages` methods. - */ - export interface LanguageIdentificationOptions { - /** - * The confidence threshold for language identification. The identified languages will have a - * confidence higher or equal to the confidence threshold. The value should be between 0 and 1, e.g. 0.5. - * - * If no value is set, a default value is used instead. - * - */ - confidenceThreshold?: number; - } - - /** - * An identified language for the given input text. Returned as an Array of IdentifiedLanguage from - * `identifyPossibleLanguages`. 
- */ - export interface IdentifiedLanguage { - /** - * The [BCP-47 language code](https://en.wikipedia.org/wiki/IETF_language_tag) for the language, e.g. 'en'. - */ - language: string; - - /** - * The confidence score of the language. A float value between 0 and 1. - */ - confidence: number; - } - - /** - * An interface representing a suggested reply, an array of these are returned from `suggestReplies`. - * - * #### Example - * - * ```js - * const replies = await firebase.naturalLanguage().suggestReplies([ - * { text: "Hey, long time no speak!", }, - * { text: 'I know right, it has been a while..', userId: 'xxxx', isLocalUser: false }, - * { text: 'We should catchup some time!', }, - * { text: 'Definitely, how about we go for lunch this week?', userId: 'xxxx', isLocalUser: false }, - * ]); - * - * replies.forEach(reply => { - * console.log(reply.text); - * }); - * - * ``` - * - */ - export interface SuggestedReply { - /** - * The smart reply text. - */ - text: string; - } - - /** - * The Firebase ML Kit service interface. - * - * > This module is available for the default app only. - * - * #### Example - * - * Get the ML Kit service for the default app: - * - * ```js - * const defaultAppMLKit = firebase.naturalLanguage(); - * ``` - */ - export class Module extends FirebaseModule { - /** - * Identifies the main language for the given text. - * - * Returns a promise that resolves with a [BCP-47 language code](https://en.wikipedia.org/wiki/IETF_language_tag) of the detected language. - * - * If the language was undetected or unknown the code returned is `und`. - * - * #### Example - * - * ```js - * const language = await firebase.naturalLanguage().identifyLanguage('Hello there. General Kenobi.'); - * console.warn(language); // en - * - * const unknownLanguage = await firebase.naturalLanguage().identifyLanguage('foo bar baz', { confidenceThreshold: 0.9 }); - * console.warn(language); // und - * ``` - * - * @param text The input text to use for identifying the language. Inputs longer than 200 characters are truncated to 200 characters, as longer input does not improve the detection accuracy. - * @param options See `LanguageIdentificationOptions`. - */ - identifyLanguage(text: string, options?: LanguageIdentificationOptions): Promise; - - /** - * Identifies possible languages for the given text. - * - * #### Example - * - * ```js - * const identifiedLanguages = firebase.naturalLanguage().identifyPossibleLanguages('hello world'); - * console.warn(identifiedLanguages[0].language); // en - * ``` - * - * @param text The input text to use for identifying the language. Inputs longer than 200 characters are truncated to 200 characters, as longer input does not improve the detection accuracy. - * @param options See `LanguageIdentificationOptions`. - */ - identifyPossibleLanguages( - text: string, - options?: LanguageIdentificationOptions, - ): Promise; - - /** - * Returns suggested replies for a conversation. - * - * #### Example - * - * ```js - * const replies = await firebase.naturalLanguage().suggestReplies([ - * { text: "Hey, long time no speak!", }, - * { text: 'I know right, it has been a while..', userId: 'xxxx', isLocalUser: false }, - * { text: 'We should catchup some time!', }, - * { text: 'Definitely, how about we go for lunch this week?', userId: 'xxxx', isLocalUser: false }, - * ]); - * ``` - * - * @param messages An array of `TextMessage` interfaces. - */ - suggestReplies(messages: TextMessage[]): Promise; - } - - /** - * A `TextMessage` interface provided to `suggestReplies()`. 
- */ - export interface TextMessage { - /** - * The message text. - * - * This is required and must not be an empty string. - */ - text: string; - - /** - * Whether the message is a local user. If false, a `userId` must be provided for the message. - * - * Defaults to true. - */ - isLocalUser?: boolean; - - /** - * A user ID of a remote user. - * - * Used to help better identify users to provide more accurate replies. - */ - userId?: string; - - /** - * The timestamp of the message in milliseconds. - * - * Defaults to now (`Date.now()`). - */ - timestamp?: number; - } -} - -declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp< - FirebaseLanguageTypes.Module, - FirebaseLanguageTypes.Statics ->; - -export const firebase: ReactNativeFirebase.Module & { - naturalLanguage: typeof defaultExport; - app( - name?: string, - ): ReactNativeFirebase.FirebaseApp & { naturalLanguage(): FirebaseLanguageTypes.Module }; -}; - -export default defaultExport; - -/** - * Attach namespace to `firebase.` and `FirebaseApp.`. - */ -declare module '@react-native-firebase/app' { - namespace ReactNativeFirebase { - import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp; - - interface Module { - naturalLanguage: FirebaseModuleWithStaticsAndApp< - FirebaseLanguageTypes.Module, - FirebaseLanguageTypes.Statics - >; - } - - interface FirebaseApp { - naturalLanguage(): FirebaseLanguageTypes.Module; - } - - interface FirebaseJsonConfig { - /** - * If `true`, the Language ID Model will be installed onto the device. - */ - ml_natural_language_language_id_model: boolean; - - /** - * If `true`, the Smart Reply Model will be installed onto the device. - */ - ml_natural_language_smart_reply_model: boolean; - } - } -} diff --git a/packages/ml-natural-language/lib/index.js b/packages/ml-natural-language/lib/index.js deleted file mode 100644 index f619d4e806..0000000000 --- a/packages/ml-natural-language/lib/index.js +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import { - isArray, - isNumber, - isObject, - isString, - isUndefined, - validateOptionalNativeDependencyExists, -} from '@react-native-firebase/app/lib/common'; -import { - createModuleNamespace, - FirebaseModule, - getFirebaseRoot, -} from '@react-native-firebase/app/lib/internal'; -import validateTextMessage from './validateTextMessage'; -import version from './version'; - -// TODO not available on iOS until SDK 6.0.0 -// import TranslateModelManager from './TranslateModelManager'; - -const statics = {}; -const namespace = 'naturalLanguage'; -const nativeModuleName = [ - 'RNFBMLNaturalLanguageIdModule', - 'RNFBMLNaturalLanguageTranslateModule', - 'RNFBMLNaturalLanguageSmartReplyModule', -]; - -function validateIdentifyLanguageArgs(text, options, methodName) { - if (!isString(text)) { - throw new Error( - `firebase.naturalLanguage().${methodName}(*, _) 'text' must be a string value.`, - ); - } - - if (!isObject(options)) { - throw new Error( - `firebase.naturalLanguage().${methodName}(_, *) 'options' must be an object or undefined.`, - ); - } - - if ( - !isUndefined(options.confidenceThreshold) && - (!isNumber(options.confidenceThreshold) || - options.confidenceThreshold < 0 || - options.confidenceThreshold > 1) - ) { - throw new Error( - `firebase.naturalLanguage().${methodName}(_, *) 'options.confidenceThreshold' must be a float value between 0 and 1.`, - ); - } -} - -class FirebaseMlKitLanguageModule extends FirebaseModule { - identifyLanguage(text, options = {}) { - validateOptionalNativeDependencyExists( - 'ml_natural_language_language_id_model', - 'ML Kit Language Identification', - !!this.native.identifyLanguage, - ); - validateIdentifyLanguageArgs(text, options, 'identifyLanguage'); - return this.native.identifyLanguage(text.slice(0, 200), options); - } - - identifyPossibleLanguages(text, options = {}) { - validateOptionalNativeDependencyExists( - 'ml_natural_language_language_id_model', - 'ML Kit Language Identification', - !!this.native.identifyPossibleLanguages, - ); - validateIdentifyLanguageArgs(text, options, 'identifyPossibleLanguages'); - return this.native.identifyPossibleLanguages( - text.slice(0, 200), - Object.assign({}, options, { multipleLanguages: true }), - ); - } - - suggestReplies(messages) { - if (!isArray(messages)) { - throw new Error( - "firebase.naturalLanguage().suggestReplies(*) 'messages' must be an array value.", - ); - } - - if (messages.length === 0) { - return Promise.resolve([]); - } - - const validated = []; - - for (let i = 0; i < messages.length; i++) { - try { - validated.push(validateTextMessage(messages[i])); - } catch (e) { - throw new Error( - `firebase.naturalLanguage().suggestReplies(*) 'messages' object at index ${i} threw an error. 
${e.message}.`, - ); - } - } - - return this.native.suggestReplies(validated); - } -} - -// import { SDK_VERSION } from '@react-native-firebase/mlkit'; -export const SDK_VERSION = version; - -// import naturalLanguage from '@react-native-firebase/mlkit'; -// naturalLanguage().X(...); -export default createModuleNamespace({ - statics, - version, - namespace, - nativeModuleName, - nativeEvents: false, - hasMultiAppSupport: true, - hasCustomUrlOrRegionSupport: false, - ModuleClass: FirebaseMlKitLanguageModule, -}); - -// import naturalLanguage, { firebase } from '@react-native-firebase/mlkit'; -// naturalLanguage().X(...); -// firebase.naturalLanguage().X(...); -export const firebase = getFirebaseRoot(); - -// TODO not available on Firebase iOS until SDK 6.0.0, add in RNFB >6.1 -// -------------------------- -// LANGUAGE_TRANSLATE -// -------------------------- -// translate(text, translationOptions) { -// const _translationOptions = {}; -// -// // retrieve the language id integers -// const { sourceLanguage, targetLanguage } = translationOptions; -// _translationOptions.sourceLanguage = this.native.TRANSLATE_LANGUAGES[sourceLanguage]; -// _translationOptions.targetLanguage = this.native.TRANSLATE_LANGUAGES[targetLanguage]; -// // translationOptions required: -// // sourceLanguage -// // targetLanguage -// return this.native.translate(text, _translationOptions); -// } -// -// get translateModelManager() { -// return new TranslateModelManager(this); -// } diff --git a/packages/ml-natural-language/lib/validateTextMessage.js b/packages/ml-natural-language/lib/validateTextMessage.js deleted file mode 100644 index 9b39f47462..0000000000 --- a/packages/ml-natural-language/lib/validateTextMessage.js +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import { - hasOwnProperty, - isBoolean, - isNumber, - isObject, - isString, -} from '@react-native-firebase/app/lib/common'; - -export default function validateTextMessage(textMessage) { - if (!isObject(textMessage)) { - throw new Error("'textMessage' expected an object value"); - } - - const out = { - timestamp: Date.now(), - isLocalUser: true, - }; - - if (!isString(textMessage.text)) { - throw new Error("'textMessage.text' expected a string value"); - } - - if (textMessage.text.length === 0) { - throw new Error("'textMessage.text' expected string value to not be empty"); - } - - out.text = textMessage.text; - - if (hasOwnProperty(textMessage, 'timestamp')) { - if (!isNumber(textMessage.timestamp)) { - throw new Error("'textMessage.timestamp' expected number value (milliseconds)"); - } - - out.timestamp = textMessage.timestamp; - } - - if (hasOwnProperty(textMessage, 'isLocalUser')) { - if (!isBoolean(textMessage.isLocalUser)) { - throw new Error("'textMessage.isLocalUser' expected boolean value"); - } - - out.isLocalUser = textMessage.isLocalUser; - } - - if (out.isLocalUser && hasOwnProperty(textMessage, 'userId')) { - throw new Error( - "'textMessage.userId' expected 'textMessage.isLocalUser' to be false when setting a user ID.", - ); - } else if (!out.isLocalUser && !hasOwnProperty(textMessage, 'userId')) { - throw new Error("'textMessage.userId' expected a string value"); - } else if (!out.isLocalUser && hasOwnProperty(textMessage, 'userId')) { - if (!isString(textMessage.userId)) { - throw new Error("'textMessage.userId' expected a string value"); - } - - if (textMessage.userId.length === 0) { - throw new Error("'textMessage.userId' expected string value to not be empty"); - } - - out.userId = textMessage.userId; - } - - return out; -} diff --git a/packages/ml-natural-language/package.json b/packages/ml-natural-language/package.json deleted file mode 100644 index a49342b013..0000000000 --- a/packages/ml-natural-language/package.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "name": "@react-native-firebase/ml-natural-language", - "version": "7.4.11", - "author": "Invertase (http://invertase.io)", - "description": "React Native Firebase - Firebase ML Kit brings the power of machine learning to your React Native application, supporting both Android & iOS.", - "main": "lib/index.js", - "types": "lib/index.d.ts", - "scripts": { - "build": "genversion --semi lib/version.js", - "build:clean": "rimraf android/build && rimraf ios/build", - "prepare": "yarn run build" - }, - "repository": { - "type": "git", - "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml-natural-language" - }, - "license": "Apache-2.0", - "keywords": [ - "react", - "react-native", - "firebase", - "mlkit", - "identify language", - "smart replies", - "machine learning", - "barcode", - "label", - "natural language", - "nlp", - "vision" - ], - "peerDependencies": { - "@react-native-firebase/app": "8.4.7" - }, - "publishConfig": { - "access": "public" - } -} diff --git a/packages/ml-natural-language/type-test.ts b/packages/ml-natural-language/type-test.ts deleted file mode 100644 index bee7bb9abc..0000000000 --- a/packages/ml-natural-language/type-test.ts +++ /dev/null @@ -1,53 +0,0 @@ -import firebase from '@react-native-firebase/app'; -import * as language from '@react-native-firebase/ml-natural-language'; - -console.log(language.default().app); - -// checks module exists at root -console.log(firebase.naturalLanguage().app.name); - -// checks module exists at app level 
-console.log(firebase.app().naturalLanguage().app.name); - -// checks statics exist -console.log(firebase.naturalLanguage.SDK_VERSION); - -// checks statics exist on defaultExport -console.log(firebase.SDK_VERSION); - -// checks root exists -console.log(firebase.SDK_VERSION); - -// checks firebase named export exists on module -console.log(language.firebase.SDK_VERSION); - -// checks multi-app support exists -console.log(firebase.naturalLanguage(firebase.app()).app.name); - -firebase - .naturalLanguage() - .identifyLanguage('foo', { - confidenceThreshold: 0.3, - }) - .then(str => str.replace); - -firebase - .naturalLanguage() - .identifyPossibleLanguages('foo', { - confidenceThreshold: 0.3, - }) - .then(languages => languages.forEach($ => $.confidence)); - -firebase - .naturalLanguage() - .suggestReplies([ - { - text: 'foo', - isLocalUser: true, - userId: '123', - timestamp: 123, - }, - ]) - .then(replies => { - replies.forEach($ => $.text); - }); diff --git a/packages/ml-vision/.npmignore b/packages/ml-vision/.npmignore deleted file mode 100644 index 29e5aa19bb..0000000000 --- a/packages/ml-vision/.npmignore +++ /dev/null @@ -1,66 +0,0 @@ -# Built application files -android/*/build/ - -# Crashlytics configuations -android/com_crashlytics_export_strings.xml - -# Local configuration file (sdk path, etc) -android/local.properties - -# Gradle generated files -android/.gradle/ - -# Signing files -android/.signing/ - -# User-specific configurations -android/.idea/gradle.xml -android/.idea/libraries/ -android/.idea/workspace.xml -android/.idea/tasks.xml -android/.idea/.name -android/.idea/compiler.xml -android/.idea/copyright/profiles_settings.xml -android/.idea/encodings.xml -android/.idea/misc.xml -android/.idea/modules.xml -android/.idea/scopes/scope_settings.xml -android/.idea/vcs.xml -android/*.iml - -# Xcode -*.pbxuser -*.mode1v3 -*.mode2v3 -*.perspectivev3 -*.xcuserstate -ios/Pods -ios/build -*project.xcworkspace* -*xcuserdata* - -# OS-specific files -.DS_Store -.DS_Store? -._* -.Spotlight-V100 -.Trashes -ehthumbs.db -Thumbs.dbandroid/gradle -android/gradlew -android/build -android/gradlew.bat -android/gradle/ - -.idea -coverage -yarn.lock -e2e/ -.github -.vscode -.nyc_output -android/.settings -*.coverage.json -.circleci -.eslintignore -type-test.ts diff --git a/packages/ml-vision/LICENSE b/packages/ml-vision/LICENSE deleted file mode 100644 index ef3ed44f06..0000000000 --- a/packages/ml-vision/LICENSE +++ /dev/null @@ -1,32 +0,0 @@ -Apache-2.0 License ------------------- - -Copyright (c) 2016-present Invertase Limited & Contributors - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this library except in compliance with the License. - -You may obtain a copy of the Apache-2.0 License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. - - -Creative Commons Attribution 3.0 License ----------------------------------------- - -Copyright (c) 2016-present Invertase Limited & Contributors - -Documentation and other instructional materials provided for this project -(including on a separate documentation repository or it's documentation website) are -licensed under the Creative Commons Attribution 3.0 License. 
Code samples/blocks -contained therein are licensed under the Apache License, Version 2.0 (the "License"), as above. - -You may obtain a copy of the Creative Commons Attribution 3.0 License at - - https://creativecommons.org/licenses/by/3.0/ diff --git a/packages/ml-vision/README.md b/packages/ml-vision/README.md deleted file mode 100644 index de5fcec63e..0000000000 --- a/packages/ml-vision/README.md +++ /dev/null @@ -1,31 +0,0 @@ -

- [README banner: React Native Firebase - ML Kit Vision]
- ---- - -# DEPRECATED - -This package is deprecated and should no longer be used. - -Google has split mobile machine learning functionality into two pieces: - -1. "On-Device" inferences - this will be handled via the standalone ["Google ML Kit"](https://developers.google.com/ml-kit) libraries, and the related [`react-native-mlkit`](https://github.com/invertase/react-native-mlkit) package. This includes any APIs where the device uses a local model to make inferences - -1. "Cloud" inferences - these will continue in Firebase, but are now in the ["Firebase ML"](https://firebase.google.com/docs/ml) library, and will be available from the new consolidated `@react-native-firebase/ml` package - -More information on the transition is available here: https://firebase.google.com/docs/ml#cloud_vs_on-device - ---- - -
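
To make the split concrete, here is a small migration sketch in the style of the docs elsewhere in this repo. The package names come from the notice above; the `cloudTextRecognizerProcessImage` method name is assumed from the v6 cloud vision API surface and may differ in the version you install, so treat this as illustrative rather than definitive:

```js
// Hedged sketch only; verify method names against your installed version.

// Before (deprecated):
// import vision from '@react-native-firebase/ml-vision';
// const result = await vision().cloudTextRecognizerProcessImage(localImagePath);

// After: cloud inference via the consolidated Firebase ML package.
import ml from '@react-native-firebase/ml';

async function recognizeTextInCloud(localImagePath) {
  const result = await ml().cloudTextRecognizerProcessImage(localImagePath);
  return result.text; // the full recognized text from the cloud API
}

// On-device inference (language ID, smart replies, face detection, etc.)
// leaves Firebase entirely and moves to the standalone Google ML Kit
// libraries via the react-native-mlkit packages mentioned above.
```
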

- Built and maintained with 💛 by Invertase.
-
----
diff --git a/packages/ml-vision/android/.editorconfig b/packages/ml-vision/android/.editorconfig
deleted file mode 100644
index 670398e990..0000000000
--- a/packages/ml-vision/android/.editorconfig
+++ /dev/null
@@ -1,10 +0,0 @@
-# editorconfig
-root = true
-
-[*]
-indent_style = space
-indent_size = 2
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
diff --git a/packages/ml-vision/android/lint.xml b/packages/ml-vision/android/lint.xml
deleted file mode 100644
index c3dd72aca0..0000000000
--- a/packages/ml-vision/android/lint.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-[5 lines of lint.xml markup stripped during extraction]
diff --git a/packages/ml-vision/android/ml-models.gradle b/packages/ml-vision/android/ml-models.gradle
deleted file mode 100644
index af7ec7e37f..0000000000
--- a/packages/ml-vision/android/ml-models.gradle
+++ /dev/null
@@ -1,49 +0,0 @@
-apply from: file("./../../app/android/firebase-json.gradle")
-
-def mlModels = [
-  'ml_vision_face_model',
-  'ml_vision_image_label_model',
-  // 'ml_vision_object_detection_model', // object tracking -> TODO 6.1
-]
-
-dependencies {
-  if (rootProject.ext && rootProject.ext.firebaseJson) {
-    mlModels.each { modelFlag ->
-      if (rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) {
-        rootProject.logger.info ":${project.name} model enabled: '${modelFlag}'"
-        implementation "com.google.firebase:firebase-${modelFlag.replaceAll("_", "-")}"
-      } else {
-        rootProject.logger.warn ":${project.name} model disabled: '${modelFlag}'"
-      }
-    }
-  } else {
-    rootProject.logger.warn ":${project.name} skipping optional models as no firebaseJson extension found, you may be missing a firebase.json file in the root of your React Native project, or you've not installed the @react-native-firebase/app package and included it in your app build."
-  }
-}
-
-def manifestModels = [
-  'ml_vision_ocr_model',
-  'ml_vision_face_model',
-  'ml_vision_barcode_model',
-  'ml_vision_label_model',
-  // 'ml_vision_ica_model', // object tracking -> TODO 6.1
-]
-
-def manifestModelsString = ""
-
-manifestModels.each { modelFlag ->
-  if (rootProject.ext && rootProject.ext.firebaseJson && rootProject.ext.firebaseJson.isFlagEnabled(modelFlag) == true) {
-    def modelIdentifier = modelFlag.replace("ml_vision_", "").replace("_model", "")
-    if (manifestModelsString.length() > 0) {
-      manifestModelsString += "," + modelIdentifier
-    } else {
-      manifestModelsString += modelIdentifier
-    }
-  }
-}
-
-android {
-  defaultConfig {
-    manifestPlaceholders = [visionModels: manifestModelsString]
-  }
-}
diff --git a/packages/ml-vision/android/settings.gradle b/packages/ml-vision/android/settings.gradle
deleted file mode 100644
index 2c89304799..0000000000
--- a/packages/ml-vision/android/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-rootProject.name = '@react-native-firebase_ml-vision'
diff --git a/packages/ml-vision/android/src/main/AndroidManifest.xml b/packages/ml-vision/android/src/main/AndroidManifest.xml
deleted file mode 100644
index ed3a069d68..0000000000
--- a/packages/ml-vision/android/src/main/AndroidManifest.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-[9 lines of AndroidManifest.xml markup stripped during extraction]
diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java b/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java
deleted file mode 100644
index 602fa6f456..0000000000
--- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionBarcodeDetectorModule.java
+++ /dev/null
@@ -1,282 +0,0 @@
-package io.invertase.firebase.ml.vision;
-
-/*
- * Copyright (c) 2016-present Invertase Limited & Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this library except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * - */ - - -import android.content.Context; -import android.os.Bundle; -import com.google.android.gms.tasks.Task; -import com.google.android.gms.tasks.Tasks; -import com.google.firebase.FirebaseApp; -import com.google.firebase.ml.vision.FirebaseVision; -import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode; -import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector; -import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions; -import com.google.firebase.ml.vision.common.FirebaseVisionImage; -import io.invertase.firebase.common.SharedUtils; -import io.invertase.firebase.common.UniversalFirebaseModule; - -import java.util.*; - -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; - -@SuppressWarnings("ConstantConditions") -class UniversalFirebaseMLVisionBarcodeDetectorModule extends UniversalFirebaseModule { - - UniversalFirebaseMLVisionBarcodeDetectorModule(Context context, String serviceName) { - super(context, serviceName); - } - - Task>> barcodeDetectorProcessImage(String appName, String stringUri, Bundle barcodeDetectorOptions) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseVisionBarcodeDetectorOptions options = getBarcodeDetectorOptions(barcodeDetectorOptions); - - FirebaseVisionBarcodeDetector visionBarcodeDetector = FirebaseVision.getInstance(firebaseApp) - .getVisionBarcodeDetector(options); - - FirebaseVisionImage image = FirebaseVisionImage.fromFilePath( - getContext(), - SharedUtils.getUri(stringUri) - ); - - List detectedBarcodesRaw = Tasks.await(visionBarcodeDetector.detectInImage(image)); - - return getBarcodesList(detectedBarcodesRaw); - }); - } - - private List> getBarcodesList(List detectedBarcodesRaw) { - List> detectedBarcodesFormatted = new ArrayList<>(detectedBarcodesRaw.size()); - - for (FirebaseVisionBarcode barcode : detectedBarcodesRaw) { - Map barcodeMap = new HashMap<>(); - barcodeMap.put(KEY_BOUNDING_BOX, SharedUtils.rectToIntArray(barcode.getBoundingBox())); - barcodeMap.put(KEY_CORNER_POINTS, SharedUtils.pointsToIntsList(barcode.getCornerPoints())); - barcodeMap.put(KEY_FORMAT, barcode.getFormat()); - barcodeMap.put(KEY_VALUE_TYPE, barcode.getValueType()); - barcodeMap.put(KEY_DISPLAY_VALUE, barcode.getDisplayValue()); - barcodeMap.put(KEY_RAW_VALUE, barcode.getRawValue()); - - // `calendarEvent` - addCalendarEventFromBarcodeToMap(barcode, barcodeMap); - - // `contactInfo` - addContactInfoFromBarcodeToMap(barcode, barcodeMap); - - // driverLicense - addDriverLicenseFromBarcodeToMap(barcode, barcodeMap); - - // email - addEmailFromBarcodeToMap(barcode, barcodeMap); - - // geoPoint - addGeoPointFromBarcodeToMap(barcode, barcodeMap); - - // phone - addPhoneFromBarcodeToMap(barcode, barcodeMap); - - // sms - addSmsFromBarcodeToMap(barcode, barcodeMap); - - // url - addUrlFromBarcodeToMap(barcode, barcodeMap); - - // wifi - addWifiFromBarcodeToMap(barcode, barcodeMap); - - detectedBarcodesFormatted.add(barcodeMap); - } - - return detectedBarcodesFormatted; - } - - private void addDriverLicenseFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getDriverLicense() == null) return; - Map driverLicenseMap = new HashMap<>(); - FirebaseVisionBarcode.DriverLicense driverLicense = barcode.getDriverLicense(); - driverLicenseMap.put("addressCity", driverLicense.getAddressCity()); - driverLicenseMap.put("addressState", driverLicense.getAddressState()); - driverLicenseMap.put("addressStreet", 
driverLicense.getAddressStreet()); - driverLicenseMap.put("addressZip", driverLicense.getAddressZip()); - driverLicenseMap.put("birthDate", driverLicense.getBirthDate()); - driverLicenseMap.put("documentType", driverLicense.getDocumentType()); - driverLicenseMap.put("expiryDate", driverLicense.getExpiryDate()); - driverLicenseMap.put("firstName", driverLicense.getFirstName()); - driverLicenseMap.put("gender", driverLicense.getGender()); - driverLicenseMap.put("issueDate", driverLicense.getIssueDate()); - driverLicenseMap.put("issuingCountry", driverLicense.getIssuingCountry()); - driverLicenseMap.put("lastName", driverLicense.getLastName()); - driverLicenseMap.put("licenseNumber", driverLicense.getLicenseNumber()); - driverLicenseMap.put("middleName", driverLicense.getMiddleName()); - barcodeMap.put("driverLicense", driverLicenseMap); - } - - private void addGeoPointFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getGeoPoint() == null) return; - List latLng = new ArrayList<>(2); - FirebaseVisionBarcode.GeoPoint geoPoint = barcode.getGeoPoint(); - latLng.add(geoPoint.getLat()); - latLng.add(geoPoint.getLng()); - barcodeMap.put(KEY_GEO_POINT, latLng); - } - - private void addSmsFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getSms() == null) return; - Map smsMap = new HashMap<>(); - FirebaseVisionBarcode.Sms sms = barcode.getSms(); - smsMap.put("message", sms.getMessage()); - smsMap.put("phoneNumber", sms.getPhoneNumber()); - barcodeMap.put(KEY_SMS, smsMap); - } - - private void addUrlFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getUrl() == null) return; - Map urlMap = new HashMap<>(); - FirebaseVisionBarcode.UrlBookmark url = barcode.getUrl(); - urlMap.put("title", url.getTitle()); - urlMap.put("url", url.getUrl()); - barcodeMap.put(KEY_URL, urlMap); - } - - private void addWifiFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getWifi() == null) return; - Map wifiMap = new HashMap<>(); - FirebaseVisionBarcode.WiFi wiFi = barcode.getWifi(); - wifiMap.put("encryptionType", wiFi.getEncryptionType()); - wifiMap.put("password", wiFi.getPassword()); - wifiMap.put("ssid", wiFi.getSsid()); - barcodeMap.put(KEY_WIFI, wifiMap); - } - - private void addEmailFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getEmail() == null) return; - barcodeMap.put(KEY_EMAIL, getEmailMap(barcode.getEmail())); - } - - private void addPhoneFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getPhone() == null) return; - barcodeMap.put(KEY_PHONE, getPhoneMap(barcode.getPhone())); - } - - private void addCalendarEventFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getCalendarEvent() == null) return; - Map calendarEventMap = new HashMap<>(); - FirebaseVisionBarcode.CalendarEvent calendarEvent = barcode.getCalendarEvent(); - calendarEventMap.put("description", calendarEvent.getDescription()); - calendarEventMap.put("end", calendarEvent.getEnd().getRawValue()); - calendarEventMap.put("location", calendarEvent.getLocation()); - calendarEventMap.put("organizer", calendarEvent.getOrganizer()); - calendarEventMap.put("start", calendarEvent.getStart().getRawValue()); - calendarEventMap.put("status", calendarEvent.getStatus()); - calendarEventMap.put("summary", calendarEvent.getSummary()); - barcodeMap.put(KEY_CALENDAR_EVENT, calendarEventMap); - } - - private void 
addContactInfoFromBarcodeToMap(FirebaseVisionBarcode barcode, Map barcodeMap) { - if (barcode.getContactInfo() == null) return; - FirebaseVisionBarcode.ContactInfo contactInfo = barcode.getContactInfo(); - Map contactInfoMap = new HashMap<>(); - - contactInfoMap.put("title", contactInfo.getTitle()); - contactInfoMap.put("organization", contactInfo.getOrganization()); - if (contactInfo.getUrls() == null) { - contactInfoMap.put("urls", new String[]{}); - } else { - contactInfoMap.put("urls", contactInfo.getUrls()); - } - - // phones - List phonesListRaw = contactInfo.getPhones(); - List> phonesListFormatted = new ArrayList<>(phonesListRaw.size()); - for (FirebaseVisionBarcode.Phone phone : phonesListRaw) { - phonesListFormatted.add(getPhoneMap(phone)); - } - contactInfoMap.put("phones", phonesListFormatted); - - // emails - List emailsListRaw = contactInfo.getEmails(); - List> emailsListFormatted = new ArrayList<>(emailsListRaw.size()); - for (FirebaseVisionBarcode.Email email : emailsListRaw) { - emailsListFormatted.add(getEmailMap(email)); - } - contactInfoMap.put("emails", emailsListFormatted); - - // person name - contactInfoMap.put("name", getPersonNameMap(contactInfo.getName())); - - // addresses - List addressListRaw = contactInfo.getAddresses(); - List> addressListFormatted = new ArrayList<>(addressListRaw.size()); - for (FirebaseVisionBarcode.Address email : addressListRaw) { - addressListFormatted.add(getAddressMap(email)); - } - contactInfoMap.put("addresses", addressListFormatted); - - barcodeMap.put(KEY_CONTACT_INFO, contactInfoMap); - } - - private Map getAddressMap(FirebaseVisionBarcode.Address address) { - Map addressMap = new HashMap<>(); - addressMap.put("lines", address.getAddressLines()); - addressMap.put("type", address.getType()); - return addressMap; - } - - private Map getPersonNameMap(FirebaseVisionBarcode.PersonName personName) { - Map personNameMap = new HashMap<>(7); - personNameMap.put("first", personName.getFirst()); - personNameMap.put("formatted", personName.getFormattedName()); - personNameMap.put("last", personName.getLast()); - personNameMap.put("middle", personName.getMiddle()); - personNameMap.put("prefix", personName.getPrefix()); - personNameMap.put("pronunciation", personName.getPronunciation()); - personNameMap.put("suffix", personName.getSuffix()); - return personNameMap; - } - - private Map getEmailMap(FirebaseVisionBarcode.Email email) { - Map emailMap = new HashMap<>(3); - emailMap.put("address", email.getAddress()); - emailMap.put("body", email.getBody()); - emailMap.put("subject", email.getSubject()); - return emailMap; - } - - private Map getPhoneMap(FirebaseVisionBarcode.Phone phone) { - Map phoneMap = new HashMap<>(); - phoneMap.put("number", phone.getNumber()); - phoneMap.put("type", phone.getType()); - return phoneMap; - } - - private FirebaseVisionBarcodeDetectorOptions getBarcodeDetectorOptions(Bundle barcodeDetectorOptionsBundle) { - FirebaseVisionBarcodeDetectorOptions.Builder builder = new FirebaseVisionBarcodeDetectorOptions.Builder(); - - int[] formats = barcodeDetectorOptionsBundle.getIntArray("barcodeFormats"); - if (formats == null) return builder.build(); - - if (formats.length == 1) { - builder.setBarcodeFormats(formats[0]); - } else if (formats.length > 1) { - builder.setBarcodeFormats(formats[0], Arrays.copyOfRange(formats, 1, formats.length)); - } - - return builder.build(); - } -} diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java 
b/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java deleted file mode 100644 index 72d6768b56..0000000000 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionFaceDetectorModule.java +++ /dev/null @@ -1,275 +0,0 @@ -package io.invertase.firebase.ml.vision; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - - -import android.content.Context; -import android.os.Bundle; -import com.google.android.gms.tasks.Task; -import com.google.android.gms.tasks.Tasks; -import com.google.firebase.FirebaseApp; -import com.google.firebase.ml.vision.FirebaseVision; -import com.google.firebase.ml.vision.common.FirebaseVisionImage; -import com.google.firebase.ml.vision.common.FirebaseVisionPoint; -import com.google.firebase.ml.vision.face.*; -import io.invertase.firebase.common.SharedUtils; -import io.invertase.firebase.common.UniversalFirebaseModule; - -import java.util.*; - -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; - -class UniversalFirebaseMLVisionFaceDetectorModule extends UniversalFirebaseModule { - UniversalFirebaseMLVisionFaceDetectorModule(Context context, String serviceName) { - super(context, serviceName); - } - - Task>> faceDetectorProcessImage( - String appName, - String stringUri, - Bundle faceDetectorOptionsBundle - ) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseVisionFaceDetectorOptions options = getFaceDetectorOptions(faceDetectorOptionsBundle); - FirebaseVisionFaceDetector visionFaceDetector = FirebaseVision.getInstance(firebaseApp) - .getVisionFaceDetector(options); - FirebaseVisionImage image = FirebaseVisionImage.fromFilePath( - getContext(), - SharedUtils.getUri(stringUri) - ); - - List visionFacesRaw = Tasks.await(visionFaceDetector.detectInImage(image)); - List> visionFacesFormatted = new ArrayList<>(visionFacesRaw.size()); - - for (FirebaseVisionFace visionFaceRaw : visionFacesRaw) { - Map visionFaceFormatted = new HashMap<>(); - - visionFaceFormatted.put( - KEY_BOUNDING_BOX, - SharedUtils.rectToIntArray(visionFaceRaw.getBoundingBox()) - ); - visionFaceFormatted.put(KEY_HEAD_EULER_ANGLE_Y, visionFaceRaw.getHeadEulerAngleY()); - visionFaceFormatted.put(KEY_HEAD_EULER_ANGLE_Z, visionFaceRaw.getHeadEulerAngleZ()); - visionFaceFormatted.put( - KEY_LEFT_EYE_OPEN_PROBABILITY, - visionFaceRaw.getLeftEyeOpenProbability() - ); - visionFaceFormatted.put( - KEY_RIGHT_EYE_OPEN_PROBABILITY, - visionFaceRaw.getRightEyeOpenProbability() - ); - - visionFaceFormatted.put(KEY_SMILING_PROBABILITY, visionFaceRaw.getSmilingProbability()); - visionFaceFormatted.put(KEY_TRACKING_ID, visionFaceRaw.getTrackingId()); - - List> faceContoursFormatted; - - if (options.getContourMode() == FirebaseVisionFaceDetectorOptions.NO_CONTOURS) { - faceContoursFormatted = new ArrayList<>(0); - } else { - 
faceContoursFormatted = new ArrayList<>(14); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.ALL_POINTS))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.FACE))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYEBROW_TOP))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYEBROW_BOTTOM))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYEBROW_TOP))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYEBROW_BOTTOM))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LEFT_EYE))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.RIGHT_EYE))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.UPPER_LIP_TOP))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.UPPER_LIP_BOTTOM))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LOWER_LIP_TOP))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.LOWER_LIP_BOTTOM))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.NOSE_BRIDGE))); - faceContoursFormatted.add(getContourMap(visionFaceRaw.getContour(FirebaseVisionFaceContour.NOSE_BOTTOM))); - } - - visionFaceFormatted.put(KEY_FACE_CONTOURS, faceContoursFormatted); - - List> faceLandmarksFormatted = new ArrayList<>(14); - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_BOTTOM) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_BOTTOM))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_RIGHT) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_RIGHT))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_LEFT) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.MOUTH_LEFT))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EYE) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EYE))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EYE) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EYE))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EAR) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_EAR))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EAR) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_EAR))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_CHEEK) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - 
Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.RIGHT_CHEEK))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_CHEEK) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.LEFT_CHEEK))) - ); - } - - if (visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.NOSE_BASE) != null) { - faceLandmarksFormatted.add(getLandmarkMap( - Objects.requireNonNull(visionFaceRaw.getLandmark(FirebaseVisionFaceLandmark.NOSE_BASE))) - ); - } - - visionFaceFormatted.put(KEY_LANDMARKS, faceLandmarksFormatted); - visionFacesFormatted.add(visionFaceFormatted); - } - - return visionFacesFormatted; - }); - } - - private Map getLandmarkMap(FirebaseVisionFaceLandmark visionFaceLandmark) { - Map visionFaceLandmarkMap = new HashMap<>(); - visionFaceLandmarkMap.put(KEY_TYPE, visionFaceLandmark.getLandmarkType()); - visionFaceLandmarkMap.put(KEY_POSITION, getVisionPointMap(visionFaceLandmark.getPosition())); - return visionFaceLandmarkMap; - } - - private float[] getVisionPointMap(FirebaseVisionPoint visionPoint) { - return new float[]{visionPoint.getX(), visionPoint.getY()}; - } - - private Map getContourMap(FirebaseVisionFaceContour visionFaceContour) { - Map visionFaceContourMap = new HashMap<>(); - - List pointsListRaw = visionFaceContour.getPoints(); - List pointsListFormatted = new ArrayList<>(pointsListRaw.size()); - for (FirebaseVisionPoint pointRaw : pointsListRaw) { - pointsListFormatted.add(getVisionPointMap(pointRaw)); - } - - visionFaceContourMap.put(KEY_TYPE, visionFaceContour.getFaceContourType()); - visionFaceContourMap.put(KEY_POINTS, pointsListFormatted); - - return visionFaceContourMap; - } - - - private FirebaseVisionFaceDetectorOptions getFaceDetectorOptions(Bundle faceDetectorOptionsBundle) { - FirebaseVisionFaceDetectorOptions.Builder builder = new FirebaseVisionFaceDetectorOptions.Builder(); - - if (faceDetectorOptionsBundle.getBoolean(KEY_ENABLE_TRACKING)) { - builder.enableTracking(); - } - - if (faceDetectorOptionsBundle.containsKey(KEY_CLASSIFICATION_MODE)) { - int classificationMode = (int) faceDetectorOptionsBundle.getDouble(KEY_CLASSIFICATION_MODE); - switch (classificationMode) { - case FirebaseVisionFaceDetectorOptions.NO_CLASSIFICATIONS: - builder.setClassificationMode(FirebaseVisionFaceDetectorOptions.NO_CLASSIFICATIONS); - break; - case FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS: - builder.setClassificationMode(FirebaseVisionFaceDetectorOptions.ALL_CLASSIFICATIONS); - break; - default: - throw new IllegalArgumentException( - "Invalid 'classificationMode' Face Detector option, must be either 1 or 2."); - } - } - - if (faceDetectorOptionsBundle.containsKey(KEY_CONTOUR_MODE)) { - int contourMode = (int) faceDetectorOptionsBundle.getDouble(KEY_CONTOUR_MODE); - switch (contourMode) { - case FirebaseVisionFaceDetectorOptions.NO_CONTOURS: - builder.setContourMode(FirebaseVisionFaceDetectorOptions.NO_CONTOURS); - break; - case FirebaseVisionFaceDetectorOptions.ALL_CONTOURS: - builder.setContourMode(FirebaseVisionFaceDetectorOptions.ALL_CONTOURS); - break; - default: - throw new IllegalArgumentException( - "Invalid 'contourMode' Face Detector option, must be either 1 or 2."); - } - } - - if (faceDetectorOptionsBundle.containsKey(KEY_LANDMARK_MODE)) { - int landmarkMode = (int) faceDetectorOptionsBundle.getDouble(KEY_LANDMARK_MODE); - switch (landmarkMode) { - case FirebaseVisionFaceDetectorOptions.NO_LANDMARKS: - 
builder.setLandmarkMode(FirebaseVisionFaceDetectorOptions.NO_LANDMARKS); - break; - case FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS: - builder.setLandmarkMode(FirebaseVisionFaceDetectorOptions.ALL_LANDMARKS); - break; - default: - throw new IllegalArgumentException( - "Invalid 'landmarkMode' Face Detector option, must be either 1 or 2."); - } - } - - if (faceDetectorOptionsBundle.containsKey(KEY_MIN_FACE_SIZE)) { - float minFaceSize = (float) faceDetectorOptionsBundle.getDouble(KEY_MIN_FACE_SIZE); - builder.setMinFaceSize(minFaceSize); - } - - if (faceDetectorOptionsBundle.containsKey(KEY_PERFORMANCE_MODE)) { - int performanceMode = (int) faceDetectorOptionsBundle.getDouble(KEY_PERFORMANCE_MODE); - switch (performanceMode) { - case FirebaseVisionFaceDetectorOptions.FAST: - builder.setPerformanceMode(FirebaseVisionFaceDetectorOptions.FAST); - break; - case FirebaseVisionFaceDetectorOptions.ACCURATE: - builder.setPerformanceMode(FirebaseVisionFaceDetectorOptions.ACCURATE); - break; - default: - throw new IllegalArgumentException( - "Invalid 'performanceMode' Face Detector option, must be either 1 or 2."); - } - } - - return builder.build(); - } -} diff --git a/packages/ml-vision/android/src/reactnative/AndroidManifest.xml b/packages/ml-vision/android/src/reactnative/AndroidManifest.xml deleted file mode 100644 index 35065179e9..0000000000 --- a/packages/ml-vision/android/src/reactnative/AndroidManifest.xml +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java deleted file mode 100644 index 04d7a3922b..0000000000 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionImageLabelerModule.java +++ /dev/null @@ -1,73 +0,0 @@ -package io.invertase.firebase.ml.vision; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import com.facebook.react.bridge.*; -import io.invertase.firebase.common.ReactNativeFirebaseModule; - -public class RNFirebaseMLVisionImageLabelerModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionImageLabeler"; - private final UniversalFirebaseMLVisionImageLabelerModule module; - - RNFirebaseMLVisionImageLabelerModule(ReactApplicationContext reactContext) { - super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionImageLabelerModule(reactContext, SERVICE_NAME); - } - - @ReactMethod - public void imageLabelerProcessImage(String appName, String stringUri, ReadableMap imageLabelerOptions, Promise promise) { - this.module.imageLabelerProcessImage(appName, stringUri, Arguments.toBundle(imageLabelerOptions)) - .addOnCompleteListener(task -> { - if (task.isSuccessful()) { - promise.resolve( - Arguments.makeNativeArray(task.getResult()) - ); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } - - @ReactMethod - public void cloudImageLabelerProcessImage(String appName, String stringUri, ReadableMap cloudImageLabelerOptions, Promise promise) { - this.module.cloudImageLabelerProcessImage(appName, stringUri, Arguments.toBundle(cloudImageLabelerOptions)) - .addOnCompleteListener(task -> { - if (task.isSuccessful()) { - promise.resolve( - Arguments.makeNativeArray(task.getResult()) - ); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } -} diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java deleted file mode 100644 index bd48145165..0000000000 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionTextRecognizerModule.java +++ /dev/null @@ -1,78 +0,0 @@ -package io.invertase.firebase.ml.vision; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import com.facebook.react.bridge.*; -import io.invertase.firebase.common.ReactNativeFirebaseModule; - -public class RNFirebaseMLVisionTextRecognizerModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionTextRecognizer"; - private final UniversalFirebaseMLVisionTextRecognizerModule module; - - RNFirebaseMLVisionTextRecognizerModule(ReactApplicationContext reactContext) { - super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionTextRecognizerModule(reactContext, SERVICE_NAME); - } - - @ReactMethod - public void textRecognizerProcessImage( - String appName, - String stringUri, - Promise promise - ) { - module.textRecognizerProcessImage(appName, stringUri) - .addOnCompleteListener(getExecutor(), task -> { - if (task.isSuccessful()) { - promise.resolve(Arguments.makeNativeMap(task.getResult())); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } - - @ReactMethod - public void cloudTextRecognizerProcessImage( - String appName, - String stringUri, - ReadableMap cloudTextRecognizerOptions, - Promise promise - ) { - module.cloudTextRecognizerProcessImage(appName, stringUri, Arguments.toBundle(cloudTextRecognizerOptions)) - .addOnCompleteListener(getExecutor(), task -> { - if (task.isSuccessful()) { - promise.resolve(Arguments.makeNativeMap(task.getResult())); - } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( - task.getException()); - rejectPromiseWithCodeAndMessage( - promise, - errorCodeAndMessage[0], - errorCodeAndMessage[1], - errorCodeAndMessage[2] - ); - } - }); - } -} diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java b/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java deleted file mode 100644 index 212722a828..0000000000 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/ReactNativeFirebaseMLVisionPackage.java +++ /dev/null @@ -1,62 +0,0 @@ -package io.invertase.firebase.ml.vision; - -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import com.facebook.react.ReactPackage; -import com.facebook.react.bridge.NativeModule; -import com.facebook.react.bridge.ReactApplicationContext; -import com.facebook.react.uimanager.ViewManager; -import io.invertase.firebase.common.ReactNativeFirebaseJSON; - -import javax.annotation.Nonnull; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -@SuppressWarnings("unused") -public class ReactNativeFirebaseMLVisionPackage implements ReactPackage { - @Nonnull - @Override - public List createNativeModules(@Nonnull ReactApplicationContext reactContext) { - List modules = new ArrayList<>(); - modules.add(new RNFirebaseMLVisionBarcodeDetectorModule(reactContext)); - modules.add(new RNFirebaseMLVisionTextRecognizerModule(reactContext)); - modules.add(new RNFirebaseMLVisionLandmarkRecognizerModule(reactContext)); - modules.add(new RNFirebaseMLVisionDocumentTextRecognizerModule(reactContext)); - - if (ReactNativeFirebaseJSON - .getSharedInstance() - .getBooleanValue("ml_vision_face_model", false)) { - modules.add(new RNFirebaseMLVisionFaceDetectorModule(reactContext)); - } - - if (ReactNativeFirebaseJSON - .getSharedInstance() - .getBooleanValue("ml_vision_image_label_model", false)) { - modules.add(new RNFirebaseMLVisionImageLabelerModule(reactContext)); - } - - return modules; - } - - @Nonnull - @Override - public List createViewManagers(@Nonnull ReactApplicationContext reactContext) { - return Collections.emptyList(); - } -} diff --git a/packages/ml-vision/e2e/barcode.e2e.js b/packages/ml-vision/e2e/barcode.e2e.js deleted file mode 100644 index eb471ce427..0000000000 --- a/packages/ml-vision/e2e/barcode.e2e.js +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -let testImageFile; - -function barcodeValidate(barcode) { - barcode.should.be.Object(); - - barcode.boundingBox.should.be.Array(); - barcode.boundingBox.length.should.eql(4); - barcode.boundingBox.forEach($ => $.should.be.Number()); - - barcode.cornerPoints.should.be.Array(); - barcode.cornerPoints.length.should.eql(4); - barcode.cornerPoints.forEach($ => { - $.should.be.Array(); - $.length.should.eql(2); - $.forEach(_ => _.should.be.Number()); - }); - - barcode.format.should.be.Number(); - barcode.valueType.should.be.Number(); - - barcode.displayValue.should.be.String(); - barcode.rawValue.should.be.String(); -} - -describe('mlkit.vision.barcode', () => { - before(async () => { - testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/barcode.png`; - await firebase - .storage() - .ref('vision/barcode.png') - .writeToFile(testImageFile); - }); - - describe('barcodeDetectorProcessImage()', () => { - it('should throw if image path is not a string', () => { - try { - firebase.vision().barcodeDetectorProcessImage(123); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql("'localImageFilePath' expected a string local file path"); - return Promise.resolve(); - } - }); - - it('should return a valid response', async () => { - const res = await firebase.vision().barcodeDetectorProcessImage(testImageFile); - - res.should.be.Array(); - res.length.should.be.greaterThan(0); - res.forEach($ => barcodeValidate($)); - }); - }); - - describe('VisionBarcodeDetectorOptions', () => { - it('throws if not an object', async () => { - try { - await firebase.vision().barcodeDetectorProcessImage(testImageFile, '123'); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql("'barcodeDetectorOptions' expected an object value"); - return Promise.resolve(); - } - }); - - describe('barcodeFormats', () => { - it('should throw if not an array', async () => { - try { - await firebase.vision().barcodeDetectorProcessImage(testImageFile, { - barcodeFormats: 'foo', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'barcodeDetectorOptions.barcodeFormats' must be an array of VisionBarcodeFormat types", - ); - return Promise.resolve(); - } - }); - - it('should throw if array item is invalid type', async () => { - try { - await firebase.vision().barcodeDetectorProcessImage(testImageFile, { - barcodeFormats: [firebase.vision.VisionBarcodeFormat.AZTEC, 'foobar'], - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'barcodeDetectorOptions.barcodeFormats' type at index 1 is invalid", - ); - return Promise.resolve(); - } - }); - - it('sets formats', async () => { - await firebase.vision().barcodeDetectorProcessImage(testImageFile, { - barcodeFormats: [ - firebase.vision.VisionBarcodeFormat.AZTEC, - firebase.vision.VisionBarcodeFormat.DATA_MATRIX, - ], - }); - }); - }); - }); -}); diff --git a/packages/ml-vision/e2e/face.e2e.js b/packages/ml-vision/e2e/face.e2e.js deleted file mode 100644 index 5727021246..0000000000 --- a/packages/ml-vision/e2e/face.e2e.js +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -let testImageFile; - -describe('mlkit.vision.face', () => { - before(async () => { - testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/faces.jpg`; - await firebase - .storage() - .ref('vision/faces.jpg') - .writeToFile(testImageFile); - }); - - describe('faceDetectorProcessImage()', () => { - it('should throw if image path is not a string', () => { - try { - firebase.vision().faceDetectorProcessImage(123); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql("'localImageFilePath' expected a string local file path"); - return Promise.resolve(); - } - }); - - it('returns basic face object with no options enabled', async () => { - const res = await firebase.vision().faceDetectorProcessImage(testImageFile); - - res.should.be.Array(); - res.length.should.be.greaterThan(0); - - res.forEach(i => { - // Currently disabled - i.trackingId.should.eql(-1); - - i.rightEyeOpenProbability.should.eql(-1); - i.leftEyeOpenProbability.should.eql(-1); - i.smilingProbability.should.eql(-1); - - i.landmarks.length.should.eql(0); - i.faceContours.length.should.eql(0); - - i.boundingBox.length.should.eql(4); - - i.headEulerAngleZ.should.be.Number(); - i.headEulerAngleY.should.be.Number(); - }); - }); - - it('returns classifications if enabled', async () => { - const res = await firebase.vision().faceDetectorProcessImage(testImageFile, { - classificationMode: 2, - }); - - res.should.be.Array(); - res.length.should.be.greaterThan(0); - - res.forEach(i => { - i.rightEyeOpenProbability.should.greaterThan(-1); - i.leftEyeOpenProbability.should.greaterThan(-1); - i.smilingProbability.should.greaterThan(-1); - }); - }); - - it('returns landmarks if enabled', async () => { - const res = await firebase.vision().faceDetectorProcessImage(testImageFile, { - landmarkMode: 2, - }); - res.should.be.Array(); - res.length.should.be.greaterThan(0); - - res.forEach(i => { - i.landmarks.length.should.be.greaterThan(0); - - i.landmarks.forEach(l => { - l.type.should.be.Number(); - l.type.should.be.greaterThan(-1); - l.position.length.should.be.eql(2); - l.position.forEach(p => p.should.be.Number()); - }); - }); - }); - - it('returns contours if enabled', async () => { - const res = await firebase.vision().faceDetectorProcessImage(testImageFile, { - contourMode: 2, - }); - res.should.be.Array(); - res.length.should.be.greaterThan(0); - - res.forEach(i => { - i.faceContours.length.should.be.greaterThan(0); - - i.faceContours.forEach(l => { - l.type.should.be.Number(); - l.type.should.be.greaterThan(-1); - l.points.length.should.be.greaterThan(1); - l.points.forEach(p => { - p.should.be.Array(); - p.length.should.be.eql(2); - }); - }); - }); - }); - }); - - describe('VisionFaceDetectorOptions', () => { - it('throws if not an object', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, '123'); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql("'faceDetectorOptions' expected an object value"); - return Promise.resolve(); - } - }); - 
- describe('classificationMode', () => { - it('throws if mode is incorrect', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - classificationMode: 'foo', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'faceDetectorOptions.classificationMode' invalid classification mode", - ); - return Promise.resolve(); - } - }); - - it('sets classificationMode', async () => { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - classificationMode: - firebase.vision.VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS, - }); - - await firebase.vision().faceDetectorProcessImage(testImageFile, { - classificationMode: - firebase.vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS, - }); - }); - }); - - describe('contourMode', () => { - it('throws if mode is incorrect', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - contourMode: 'foo', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql("'faceDetectorOptions.contourMode' invalid contour mode"); - return Promise.resolve(); - } - }); - - it('sets contourMode', async () => { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - contourMode: firebase.vision.VisionFaceDetectorContourMode.NO_CONTOURS, - }); - - await firebase.vision().faceDetectorProcessImage(testImageFile, { - contourMode: firebase.vision.VisionFaceDetectorContourMode.ALL_CONTOURS, - }); - }); - }); - - describe('performanceMode', () => { - it('throws if mode is incorrect', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - performanceMode: 'foo', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'faceDetectorOptions.performanceMode' invalid performance mode", - ); - return Promise.resolve(); - } - }); - - it('sets performanceMode', async () => { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - performanceMode: firebase.vision.VisionFaceDetectorPerformanceMode.FAST, - }); - - await firebase.vision().faceDetectorProcessImage(testImageFile, { - performanceMode: firebase.vision.VisionFaceDetectorPerformanceMode.ACCURATE, - }); - }); - }); - - describe('landmarkMode', () => { - it('throws if mode is incorrect', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - landmarkMode: 'foo', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'faceDetectorOptions.landmarkMode' invalid landmark mode", - ); - return Promise.resolve(); - } - }); - - it('sets landmarkMode', async () => { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - landmarkMode: firebase.vision.VisionFaceDetectorLandmarkMode.NO_LANDMARKS, - }); - - await firebase.vision().faceDetectorProcessImage(testImageFile, { - landmarkMode: firebase.vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS, - }); - }); - }); - - describe('minFaceSize', () => { - it('throws if size is not a number', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - minFaceSize: '0.1', - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'faceDetectorOptions.minFaceSize' expected a number value between 0 & 1", - ); - 
return Promise.resolve(); - } - }); - - it('throws if size is not valid', async () => { - try { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - minFaceSize: -1, - }); - return Promise.reject(new Error('Did not throw an Error.')); - } catch (error) { - error.message.should.containEql( - "'faceDetectorOptions.minFaceSize' expected value to be between 0 & 1", - ); - return Promise.resolve(); - } - }); - - it('sets minFaceSize', async () => { - await firebase.vision().faceDetectorProcessImage(testImageFile, { - minFaceSize: 0.3, - }); - }); - }); - }); -}); diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj deleted file mode 100644 index 90c5b75511..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.pbxproj +++ /dev/null @@ -1,384 +0,0 @@ -// !$*UTF8*$! -{ - archiveVersion = 1; - classes = { - }; - objectVersion = 48; - objects = { - -/* Begin PBXBuildFile section */ - 8B06D3F322F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */; }; - 8B06D3FC22F863AE00A5B542 /* RNFBMLVisionCommon.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */; }; - 8B06D40022F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */; }; - 8B06D40622F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */; }; - 8B06D40A22F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */; }; - 8B06D40E22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */; }; - 8B06D41222F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D41122F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m */; }; -/* End PBXBuildFile section */ - -/* Begin PBXCopyFilesBuildPhase section */ - 2744B98021F45429004F8E3F /* CopyFiles */ = { - isa = PBXCopyFilesBuildPhase; - buildActionMask = 2147483647; - dstPath = ""; - dstSubfolderSpec = 16; - files = ( - ); - runOnlyForDeploymentPostprocessing = 0; - }; -/* End PBXCopyFilesBuildPhase section */ - -/* Begin PBXFileReference section */ - 2744B98221F45429004F8E3F /* libRNFBMLVision.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBMLVision.a; sourceTree = BUILT_PRODUCTS_DIR; }; - 8B06D3F122F84F6500A5B542 /* RNFBMLVisionLandmarkRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionLandmarkRecognizerModule.h; sourceTree = ""; }; - 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionLandmarkRecognizerModule.m; sourceTree = ""; }; - 8B06D3FA22F863A400A5B542 /* RNFBMLVisionCommon.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionCommon.h; sourceTree = ""; }; - 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = 
RNFBMLVisionCommon.m; sourceTree = ""; }; - 8B06D3FE22F8747F00A5B542 /* RNFBMLVisionFaceDetectorModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionFaceDetectorModule.h; sourceTree = ""; }; - 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionFaceDetectorModule.m; sourceTree = ""; }; - 8B06D40422F97B3600A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionDocumentTextRecognizerModule.h; sourceTree = ""; }; - 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionDocumentTextRecognizerModule.m; sourceTree = ""; }; - 8B06D40822F989E400A5B542 /* RNFBMLVisionTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionTextRecognizerModule.h; sourceTree = ""; }; - 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionTextRecognizerModule.m; sourceTree = ""; }; - 8B06D40C22F99DEF00A5B542 /* RNFBMLVisionImageLabelerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionImageLabelerModule.h; sourceTree = ""; }; - 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionImageLabelerModule.m; sourceTree = ""; }; - 8B06D41022F9A14B00A5B542 /* RNFBMLVisionBarcodeDetectorModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLVisionBarcodeDetectorModule.h; sourceTree = ""; }; - 8B06D41122F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLVisionBarcodeDetectorModule.m; sourceTree = ""; }; -/* End PBXFileReference section */ - -/* Begin PBXFrameworksBuildPhase section */ - 2744B97F21F45429004F8E3F /* Frameworks */ = { - isa = PBXFrameworksBuildPhase; - buildActionMask = 2147483647; - files = ( - ); - runOnlyForDeploymentPostprocessing = 0; - }; -/* End PBXFrameworksBuildPhase section */ - -/* Begin PBXGroup section */ - 2744B97521F452B8004F8E3F /* Products */ = { - isa = PBXGroup; - children = ( - 2744B98221F45429004F8E3F /* libRNFBMLVision.a */, - ); - name = Products; - sourceTree = ""; - }; - 2744B98321F45429004F8E3F /* RNFBMLVision */ = { - isa = PBXGroup; - children = ( - 8B06D3F122F84F6500A5B542 /* RNFBMLVisionLandmarkRecognizerModule.h */, - 8B06D3F222F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m */, - 8B06D3FA22F863A400A5B542 /* RNFBMLVisionCommon.h */, - 8B06D3FB22F863AE00A5B542 /* RNFBMLVisionCommon.m */, - 8B06D3FE22F8747F00A5B542 /* RNFBMLVisionFaceDetectorModule.h */, - 8B06D3FF22F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m */, - 8B06D40422F97B3600A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.h */, - 8B06D40522F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m */, - 8B06D40822F989E400A5B542 /* RNFBMLVisionTextRecognizerModule.h */, - 8B06D40922F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m */, - 8B06D40C22F99DEF00A5B542 /* RNFBMLVisionImageLabelerModule.h */, - 8B06D40D22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m */, - 8B06D41022F9A14B00A5B542 /* RNFBMLVisionBarcodeDetectorModule.h */, - 8B06D41122F9A15A00A5B542 /* 
RNFBMLVisionBarcodeDetectorModule.m */, - ); - path = RNFBMLVision; - sourceTree = ""; - }; - 3323F52AAFE26B7384BE4DE3 = { - isa = PBXGroup; - children = ( - 2744B98321F45429004F8E3F /* RNFBMLVision */, - 2744B97521F452B8004F8E3F /* Products */, - ); - sourceTree = ""; - }; -/* End PBXGroup section */ - -/* Begin PBXNativeTarget section */ - 2744B98121F45429004F8E3F /* RNFBMLVision */ = { - isa = PBXNativeTarget; - buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLVision" */; - buildPhases = ( - 2744B97E21F45429004F8E3F /* Sources */, - 2744B97F21F45429004F8E3F /* Frameworks */, - 2744B98021F45429004F8E3F /* CopyFiles */, - ); - buildRules = ( - ); - dependencies = ( - ); - name = RNFBMLVision; - productName = RNFBMLVision; - productReference = 2744B98221F45429004F8E3F /* libRNFBMLVision.a */; - productType = "com.apple.product-type.library.static"; - }; -/* End PBXNativeTarget section */ - -/* Begin PBXProject section */ - 3323F95273A95DB34F55C6D7 /* Project object */ = { - isa = PBXProject; - attributes = { - CLASSPREFIX = RNFBMLVision; - LastUpgradeCheck = 1010; - ORGANIZATIONNAME = Invertase; - TargetAttributes = { - 2744B98121F45429004F8E3F = { - CreatedOnToolsVersion = 10.1; - ProvisioningStyle = Automatic; - }; - }; - }; - buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLVision" */; - compatibilityVersion = "Xcode 8.0"; - developmentRegion = English; - hasScannedForEncodings = 0; - knownRegions = ( - English, - en, - ); - mainGroup = 3323F52AAFE26B7384BE4DE3; - productRefGroup = 2744B97521F452B8004F8E3F /* Products */; - projectDirPath = ""; - projectRoot = ""; - targets = ( - 2744B98121F45429004F8E3F /* RNFBMLVision */, - ); - }; -/* End PBXProject section */ - -/* Begin PBXSourcesBuildPhase section */ - 2744B97E21F45429004F8E3F /* Sources */ = { - isa = PBXSourcesBuildPhase; - buildActionMask = 2147483647; - files = ( - 8B06D40E22F99DF900A5B542 /* RNFBMLVisionImageLabelerModule.m in Sources */, - 8B06D40622F97B4900A5B542 /* RNFBMLVisionDocumentTextRecognizerModule.m in Sources */, - 8B06D40A22F989EF00A5B542 /* RNFBMLVisionTextRecognizerModule.m in Sources */, - 8B06D3F322F84F7200A5B542 /* RNFBMLVisionLandmarkRecognizerModule.m in Sources */, - 8B06D3FC22F863AE00A5B542 /* RNFBMLVisionCommon.m in Sources */, - 8B06D40022F8748C00A5B542 /* RNFBMLVisionFaceDetectorModule.m in Sources */, - 8B06D41222F9A15A00A5B542 /* RNFBMLVisionBarcodeDetectorModule.m in Sources */, - ); - runOnlyForDeploymentPostprocessing = 0; - }; -/* End PBXSourcesBuildPhase section */ - -/* Begin XCBuildConfiguration section */ - 2744B98921F45429004F8E3F /* Debug */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; - CLANG_CXX_LIBRARY = "libc++"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CODE_SIGN_IDENTITY = "iPhone Developer"; - CODE_SIGN_STYLE = Automatic; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = dwarf; - GCC_C_LANGUAGE_STANDARD = gnu11; - GCC_DYNAMIC_NO_PIC = NO; - GCC_OPTIMIZATION_LEVEL = 0; - GCC_PREPROCESSOR_DEFINITIONS = ( - "DEBUG=1", - "$(inherited)", - ); - 
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - IPHONEOS_DEPLOYMENT_TARGET = 10.0; - MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; - MTL_FAST_MATH = YES; - OTHER_LDFLAGS = "-ObjC"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SDKROOT = iphoneos; - SKIP_INSTALL = YES; - TARGETED_DEVICE_FAMILY = "1,2"; - }; - name = Debug; - }; - 2744B98A21F45429004F8E3F /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - ALWAYS_SEARCH_USER_PATHS = NO; - CLANG_ANALYZER_NONNULL = YES; - CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; - CLANG_CXX_LIBRARY = "libc++"; - CLANG_ENABLE_MODULES = YES; - CLANG_ENABLE_OBJC_ARC = YES; - CLANG_ENABLE_OBJC_WEAK = YES; - CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; - CLANG_WARN_DOCUMENTATION_COMMENTS = YES; - CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; - CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; - CODE_SIGN_IDENTITY = "iPhone Developer"; - CODE_SIGN_STYLE = Automatic; - COPY_PHASE_STRIP = NO; - DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; - ENABLE_NS_ASSERTIONS = NO; - GCC_C_LANGUAGE_STANDARD = gnu11; - GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; - GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; - IPHONEOS_DEPLOYMENT_TARGET = 10.0; - MTL_ENABLE_DEBUG_INFO = NO; - MTL_FAST_MATH = YES; - OTHER_LDFLAGS = "-ObjC"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SDKROOT = iphoneos; - SKIP_INSTALL = YES; - TARGETED_DEVICE_FAMILY = "1,2"; - VALIDATE_PRODUCT = YES; - }; - name = Release; - }; - 3323F77D701E1896E6D239CF /* Release */ = { - isa = XCBuildConfiguration; - buildSettings = { - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - ENABLE_STRICT_OBJC_MSGSEND = YES; - FRAMEWORK_SEARCH_PATHS = ( - "$(inherited)", - "${BUILT_PRODUCTS_DIR}/**", - "${SRCROOT}/../../../ios/Firebase/**", - "$(FIREBASE_SEARCH_PATH)/Firebase/**", - "$(SRCROOT)/../../../ios/Pods/FirebaseMlkitLanguage/Frameworks", - "$(SRCROOT)/../../../tests/ios/Pods/FirebaseMlkitLanguage/Frameworks", - ); - GCC_NO_COMMON_BLOCKS = YES; - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - HEADER_SEARCH_PATHS = ( - "$(inherited)", - "$(REACT_SEARCH_PATH)/React/**", - "$(SRCROOT)/../../react-native/React/**", - "$(SRCROOT)/../../react-native-firebase/ios/**", - "$(FIREBASE_SEARCH_PATH)/Firebase/**", - "${SRCROOT}/../../../ios/Firebase/**", - "${SRCROOT}/../../../ios/Pods/Headers/Public/**", - "${SRCROOT}/../../../tests/ios/Pods/Headers/Public/**", - "$(SRCROOT)/../../../node_modules/react-native/React/**", - "$(SRCROOT)/../../../node_modules/react-native-firebase/ios/**", - "$(SRCROOT)/../../../packages/app/ios/**", - ); - IPHONEOS_DEPLOYMENT_TARGET = 10.0; - LIBRARY_SEARCH_PATHS = "$(inherited)"; - MACH_O_TYPE = staticlib; - OTHER_LDFLAGS = 
"$(inherited)"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SKIP_INSTALL = YES; - }; - name = Release; - }; - 3323F7E33E1559A2B9826720 /* Debug */ = { - isa = XCBuildConfiguration; - buildSettings = { - CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; - CLANG_WARN_BOOL_CONVERSION = YES; - CLANG_WARN_COMMA = YES; - CLANG_WARN_CONSTANT_CONVERSION = YES; - CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; - CLANG_WARN_EMPTY_BODY = YES; - CLANG_WARN_ENUM_CONVERSION = YES; - CLANG_WARN_INFINITE_RECURSION = YES; - CLANG_WARN_INT_CONVERSION = YES; - CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; - CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; - CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; - CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; - CLANG_WARN_STRICT_PROTOTYPES = YES; - CLANG_WARN_SUSPICIOUS_MOVE = YES; - CLANG_WARN_UNREACHABLE_CODE = YES; - CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - ENABLE_STRICT_OBJC_MSGSEND = YES; - ENABLE_TESTABILITY = YES; - FRAMEWORK_SEARCH_PATHS = ( - "$(inherited)", - "${BUILT_PRODUCTS_DIR}/**", - "${SRCROOT}/../../../ios/Firebase/**", - "$(FIREBASE_SEARCH_PATH)/Firebase/**", - "$(SRCROOT)/../../../ios/Pods/FirebaseMlkitLanguage/Frameworks", - ); - GCC_NO_COMMON_BLOCKS = YES; - GCC_WARN_64_TO_32_BIT_CONVERSION = YES; - GCC_WARN_ABOUT_RETURN_TYPE = YES; - GCC_WARN_UNDECLARED_SELECTOR = YES; - GCC_WARN_UNINITIALIZED_AUTOS = YES; - GCC_WARN_UNUSED_FUNCTION = YES; - GCC_WARN_UNUSED_VARIABLE = YES; - HEADER_SEARCH_PATHS = ( - "$(inherited)", - "$(REACT_SEARCH_PATH)/React/**", - "$(SRCROOT)/../../react-native/React/**", - "$(SRCROOT)/../../react-native-firebase/ios/**", - "$(FIREBASE_SEARCH_PATH)/Firebase/**", - "${SRCROOT}/../../../ios/Firebase/**", - "${SRCROOT}/../../../ios/Pods/Headers/Public/**", - "${SRCROOT}/../../../tests/ios/Pods/Headers/Public/**", - "$(SRCROOT)/../../../node_modules/react-native/React/**", - "$(SRCROOT)/../../../node_modules/react-native-firebase/ios/**", - "$(SRCROOT)/../../../packages/app/ios/**", - ); - IPHONEOS_DEPLOYMENT_TARGET = 10.0; - LIBRARY_SEARCH_PATHS = "$(inherited)"; - MACH_O_TYPE = staticlib; - ONLY_ACTIVE_ARCH = YES; - OTHER_LDFLAGS = "$(inherited)"; - PRODUCT_NAME = "$(TARGET_NAME)"; - SKIP_INSTALL = YES; - }; - name = Debug; - }; -/* End XCBuildConfiguration section */ - -/* Begin XCConfigurationList section */ - 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLVision" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 2744B98921F45429004F8E3F /* Debug */, - 2744B98A21F45429004F8E3F /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; - 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLVision" */ = { - isa = XCConfigurationList; - buildConfigurations = ( - 3323F7E33E1559A2B9826720 /* Debug */, - 3323F77D701E1896E6D239CF /* Release */, - ); - defaultConfigurationIsVisible = 0; - defaultConfigurationName = Release; - }; -/* End XCConfigurationList section */ - }; - rootObject = 3323F95273A95DB34F55C6D7 /* Project object */; -} diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata deleted file mode 100644 index 919434a625..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/contents.xcworkspacedata +++ /dev/null @@ -1,7 +0,0 @@ - - - - - diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist 
b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist deleted file mode 100644 index 18d981003d..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist +++ /dev/null @@ -1,8 +0,0 @@ - - - - - IDEDidComputeMac32BitWarning - - - diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings deleted file mode 100644 index 0c67376eba..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings +++ /dev/null @@ -1,5 +0,0 @@ - - - - - diff --git a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist b/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist deleted file mode 100644 index 63f0a6e5dd..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision.xcodeproj/xcshareddata/IDETemplateMacros.plist +++ /dev/null @@ -1,24 +0,0 @@ - - - - - FILEHEADER - -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - - diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h deleted file mode 100644 index 7e40c1b65e..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.h +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import -#import -#import - -@interface RNFBMLVisionBarcodeDetectorModule : NSObject - -@end \ No newline at end of file diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m deleted file mode 100644 index 23e0a0b16e..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionBarcodeDetectorModule.m +++ /dev/null @@ -1,257 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import -#import -#import "RNFBMLVisionBarcodeDetectorModule.h" -#import "RNFBMLVisionCommon.h" - -@implementation RNFBMLVisionBarcodeDetectorModule -#pragma mark - -#pragma mark Module Setup - -RCT_EXPORT_MODULE(); - -#pragma mark - -#pragma mark Firebase ML Kit Vision Methods - -RCT_EXPORT_METHOD(barcodeDetectorProcessImage: - (FIRApp *) firebaseApp - : (NSString *)filePath - : (NSDictionary *)barcodeDetectorOptions - : (RCTPromiseResolveBlock)resolve - : (RCTPromiseRejectBlock)reject -) { - [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) { - if (errorCodeMessageArray != nil) { - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": errorCodeMessageArray[0], - @"message": errorCodeMessageArray[1], - }]; - return; - } - - FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image]; - FIRVision *vision = [FIRVision visionForApp:firebaseApp]; - - FIRVisionBarcodeFormat barcodeFormat = nil; - - if (barcodeDetectorOptions[@"barcodeFormats"]) { - NSArray *formats = barcodeDetectorOptions[@"barcodeFormats"]; - for (id format in formats) { - if (barcodeFormat == nil) { - barcodeFormat = [format integerValue]; - } else { - barcodeFormat |= [format integerValue]; - } - } - } else { - barcodeFormat = FIRVisionBarcodeFormatAll; - } - - FIRVisionBarcodeDetectorOptions *options = [[FIRVisionBarcodeDetectorOptions alloc] initWithFormats:barcodeFormat]; - - FIRVisionBarcodeDetector *barcodeDetector = [vision barcodeDetectorWithOptions:options]; - [barcodeDetector detectInImage:visionImage completion:^(NSArray *barcodes, NSError *error) { - if (error != nil) { - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": @"unknown", - @"message": [error localizedDescription], - }]; - return; - } - - if (barcodes == nil) { - resolve(@[]); - return; - } - - resolve([self getBarcodesList:barcodes]); - }]; - }]; -} - -- (NSArray *)getBarcodesList:(NSArray *)barcodes { - NSMutableArray *barcodeListFormatted = [[NSMutableArray alloc] init]; - - for (FIRVisionBarcode *barcode in barcodes) { - NSMutableDictionary *formattedBarcode = [[NSMutableDictionary alloc] init]; - - formattedBarcode[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:barcode.frame]; - formattedBarcode[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:barcode.cornerPoints]; - formattedBarcode[@"format"] = @(barcode.format); - formattedBarcode[@"valueType"] = @(barcode.valueType); - formattedBarcode[@"displayValue"] = barcode.displayValue; - formattedBarcode[@"rawValue"] = barcode.rawValue; - - if (barcode.email != nil) formattedBarcode[@"email"] = [self getEmailMap:barcode.email]; - if (barcode.phone != nil) formattedBarcode[@"phone"] = [self getPhoneMap:barcode.phone]; - if (barcode.sms != nil) formattedBarcode[@"sms"] = [self getSMSMap:barcode.sms]; - if (barcode.URL != nil) formattedBarcode[@"url"] = [self getURLMap:barcode.URL]; - if (barcode.wifi != nil) formattedBarcode[@"wifi"] = [self getWiFiMap:barcode.wifi]; - if (barcode.geoPoint != nil) 
formattedBarcode[@"geoPoint"] = [self getGeoPointList:barcode.geoPoint]; - if (barcode.contactInfo != nil) formattedBarcode[@"contactInfo"] = [self getContactInfoMap:barcode.contactInfo]; - if (barcode.calendarEvent != nil) formattedBarcode[@"calendarEvent"] = [self getCalendarEventMap:barcode.calendarEvent]; - if (barcode.driverLicense != nil) formattedBarcode[@"driverLicense"] = [self getDriverLicenseMap:barcode.driverLicense]; - - [barcodeListFormatted addObject:formattedBarcode]; - } - - return barcodeListFormatted; -} - -- (NSDictionary *)getEmailMap:(FIRVisionBarcodeEmail *)email { - return @{ - @"address": email.address ?: (id) [NSNull null], - @"body": email.body ?: (id) [NSNull null], - @"subject": email.subject ?: (id) [NSNull null], - }; -} - -- (NSDictionary *)getPhoneMap:(FIRVisionBarcodePhone *)phone { - return @{ - @"number": phone.number ?: (id) [NSNull null], - @"type": @(phone.type), - }; -} - -- (NSDictionary *)getSMSMap:(FIRVisionBarcodeSMS *)sms { - return @{ - @"message": sms.message ?: (id) [NSNull null], - @"phoneNumber": sms.phoneNumber ?: (id) [NSNull null], - }; -} - -- (NSDictionary *)getURLMap:(FIRVisionBarcodeURLBookmark *)url { - return @{ - @"title": url.title ?: (id) [NSNull null], - @"url": url.url ?: (id) [NSNull null], - }; -} - -- (NSDictionary *)getWiFiMap:(FIRVisionBarcodeWiFi *)wifi { - return @{ - @"encryptionType": @(wifi.type), - @"password": wifi.password ?: (id) [NSNull null], - @"ssid": wifi.ssid ?: (id) [NSNull null], - }; -} - -- (NSArray *)getGeoPointList:(FIRVisionBarcodeGeoPoint *)geoPoint { - return @[@(geoPoint.latitude), @(geoPoint.longitude)]; -} - -- (NSDictionary *)getPersonNameMap:(FIRVisionBarcodePersonName *)name { - return @{ - @"first": name.first ?: (id) [NSNull null], - @"formatted": name.formattedName ?: (id) [NSNull null], - @"last": name.last ?: (id) [NSNull null], - @"middle": name.middle ?: (id) [NSNull null], - @"prefix": name.prefix ?: (id) [NSNull null], - @"pronunciation": name.pronounciation ?: (id) [NSNull null], - @"suffix": name.suffix ?: (id) [NSNull null], - }; -} - -- (NSDictionary *)getAddressMap:(FIRVisionBarcodeAddress *)address { - return @{ - @"lines": address.addressLines ?: @[], - @"type": @(address.type), - }; -} - -- (NSDictionary *)getContactInfoMap:(FIRVisionBarcodeContactInfo *)contactInfo { - NSMutableDictionary *contactInfoFormatted = [@{ - @"title": contactInfo.jobTitle ?: (id) [NSNull null], - @"organisation": contactInfo.organization ?: (id) [NSNull null], - } mutableCopy]; - - // Name - if (contactInfo.name != nil) { - contactInfoFormatted[@"name"] = [self getPersonNameMap:contactInfo.name]; - } - - // URLs - NSMutableArray *urls = [@[] mutableCopy]; - if (contactInfo.urls != nil) { - for (NSString *url in contactInfo.urls) { - [urls addObject:url]; - } - } - contactInfoFormatted[@"urls"] = urls; - - // Phones - NSMutableArray *phones = [@[] mutableCopy]; - if (contactInfo.phones != nil) { - for (FIRVisionBarcodePhone *phone in contactInfo.phones) { - [phones addObject:[self getPhoneMap:phone]]; - } - } - contactInfoFormatted[@"phones"] = phones; - - // Emails - NSMutableArray *emails = [@[] mutableCopy]; - if (contactInfo.emails != nil) { - for (FIRVisionBarcodeEmail *email in contactInfo.emails) { - [emails addObject:[self getEmailMap:email]]; - } - } - contactInfoFormatted[@"emails"] = phones; - - // Addresses - NSMutableArray *addresses = [@[] mutableCopy]; - if (contactInfo.addresses != nil) { - for (FIRVisionBarcodeAddress *address in contactInfo.addresses) { - [emails addObject:[self 
getAddressMap:address]]; - } - } - contactInfoFormatted[@"addresses"] = addresses; - - return contactInfoFormatted; -} - -- (NSDictionary *)getCalendarEventMap:(FIRVisionBarcodeCalendarEvent *)event { - return @{ - @"description": event.description ?: (id) [NSNull null], - @"end": event.end ? [RNFBSharedUtils getISO8601String:event.end] : (id) [NSNull null], - @"location": event.location ?: (id) [NSNull null], - @"organizer": event.organizer ?: (id) [NSNull null], - @"start": event.start ? [RNFBSharedUtils getISO8601String:event.start] : (id) [NSNull null], - @"status": event.status ?: (id) [NSNull null], - @"summary": event.summary ?: (id) [NSNull null], - }; -} - -- (NSDictionary *)getDriverLicenseMap:(FIRVisionBarcodeDriverLicense *)license { - return @{ - @"addressCity": license.addressCity ?: (id) [NSNull null], - @"addressState": license.addressState ?: (id) [NSNull null], - @"addressZip": license.addressZip ?: (id) [NSNull null], - @"birthDate": license.birthDate ?: (id) [NSNull null], - @"documentType": license.documentType ?: (id) [NSNull null], - @"expiryDate": license.expiryDate ?: (id) [NSNull null], - @"firstName": license.firstName ?: (id) [NSNull null], - @"gender": license.gender ?: (id) [NSNull null], - @"issueDate": license.issuingDate ?: (id) [NSNull null], - @"issuingCountry": license.issuingCountry ?: (id) [NSNull null], - @"lastName": license.lastName ?: (id) [NSNull null], - @"licenseNumber": license.licenseNumber ?: (id) [NSNull null], - @"middleName": license.middleName ?: (id) [NSNull null], - }; -} - -@end diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m deleted file mode 100644 index 82fe1da212..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.m +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
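For reference, the JavaScript entry point that fed the `RNFBMLVisionBarcodeDetectorModule` removed above is documented in the TypeScript definitions deleted further down. A minimal usage sketch (the helper name and file path are illustrative only):

```js
import vision from '@react-native-firebase/ml-vision';

async function readBarcodes(localFilePath) {
  // Resolves with the array built by getBarcodesList on the native side.
  const barcodes = await vision().barcodeDetectorProcessImage(localFilePath);

  barcodes.forEach(barcode => {
    // displayValue is the user-friendly form; rawValue is the payload as encoded.
    console.log(barcode.displayValue, barcode.rawValue);
  });
}
```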
- * - */ - - -#import -#import -#import "RNFBMLVisionCommon.h" - -@implementation RNFBMLVisionCommon - -+ (NSArray *)rectToIntArray:(CGRect)rect { - CGSize size = rect.size; - CGPoint point = rect.origin; - return @[@(point.x), @(point.y), @(point.x + size.width), @(point.y + size.height)]; -} - -+ (NSDictionary *)contourToDict:(FIRVisionFaceContour *)visionFaceContour { - NSMutableDictionary *visionFaceContourDict = [[NSMutableDictionary alloc] init]; - - if (visionFaceContour == nil) { - return visionFaceContourDict; - } - - NSMutableArray *pointsFormatted = [[NSMutableArray alloc] init]; - for (FIRVisionPoint *point in visionFaceContour.points) { - [pointsFormatted addObject:[self arrayForFIRVisionPoint:point]]; - } - - visionFaceContourDict[@"type"] = [self contourTypeToInt:visionFaceContour.type]; - visionFaceContourDict[@"points"] = pointsFormatted; - - return visionFaceContourDict; -} - -+ (NSNumber *)contourTypeToInt:(NSString *)faceContourType { - if ([@"All" isEqualToString:faceContourType]) { - return @1; - } - if ([@"Face" isEqualToString:faceContourType]) { - return @2; - } - if ([@"LeftEyebrowTop" isEqualToString:faceContourType]) { - return @3; - } - if ([@"LeftEyebrowBottom" isEqualToString:faceContourType]) { - return @4; - } - if ([@"RightEyebrowTop" isEqualToString:faceContourType]) { - return @5; - } - if ([@"RightEyebrowBottom" isEqualToString:faceContourType]) { - return @6; - } - if ([@"LeftEye" isEqualToString:faceContourType]) { - return @7; - } - if ([@"RightEye" isEqualToString:faceContourType]) { - return @8; - } - if ([@"UpperLipTop" isEqualToString:faceContourType]) { - return @9; - } - if ([@"UpperLipBottom" isEqualToString:faceContourType]) { - return @10; - } - if ([@"LowerLipTop" isEqualToString:faceContourType]) { - return @11; - } - if ([@"LowerLipBottom" isEqualToString:faceContourType]) { - return @12; - } - if ([@"NoseBridge" isEqualToString:faceContourType]) { - return @13; - } - if ([@"NoseBottom" isEqualToString:faceContourType]) { - return @14; - } - return @-1; -} - -+ (NSDictionary *)landmarkToDict:(FIRVisionFaceLandmark *)visionFaceLandmark { - NSMutableDictionary *visionFaceLandmarkDict = [[NSMutableDictionary alloc] init]; - - if (visionFaceLandmark == nil) { - return visionFaceLandmarkDict; - } - - visionFaceLandmarkDict[@"type"] = [self landmarkTypeToInt:visionFaceLandmark.type]; - visionFaceLandmarkDict[@"position"] = [self arrayForFIRVisionPoint:visionFaceLandmark.position]; - return visionFaceLandmarkDict; -} - -+ (NSNumber *)landmarkTypeToInt:(NSString *)faceLandmarkType { - if ([@"MouthBottom" isEqualToString:faceLandmarkType]) { - return @0; - } - if ([@"MouthRight" isEqualToString:faceLandmarkType]) { - return @11; - } - if ([@"MouthLeft" isEqualToString:faceLandmarkType]) { - return @5; - } - if ([@"LeftEar" isEqualToString:faceLandmarkType]) { - return @3; - } - if ([@"RightEar" isEqualToString:faceLandmarkType]) { - return @9; - } - if ([@"LeftEye" isEqualToString:faceLandmarkType]) { - return @4; - } - if ([@"RightEye" isEqualToString:faceLandmarkType]) { - return @10; - } - if ([@"LeftCheek" isEqualToString:faceLandmarkType]) { - return @1; - } - if ([@"RightCheek" isEqualToString:faceLandmarkType]) { - return @7; - } - if ([@"NoseBase" isEqualToString:faceLandmarkType]) { - return @6; - } - return @-1; -} - -+ (NSArray *)visionPointsToArray:(NSArray *_Nullable)points { - if (points == nil) { - return @[]; - } - - NSMutableArray *pointsArray = [[NSMutableArray alloc] init]; - for (NSValue *point in points) { - [pointsArray 
addObject:[self arrayForCGPoint:point.CGPointValue]]; - } - - return pointsArray; -} - -+ (NSArray *)arrayForCGPoint:(CGPoint)point { - return @[@(point.x), @(point.y)]; -} - -+ (NSArray *)arrayForFIRVisionPoint:(FIRVisionPoint *)point { - return @[point.x, point.y]; -} - -+ (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)( - NSArray *errorCodeMessageArray, - UIImage *image -))completion { - if (![[NSFileManager defaultManager] fileExistsAtPath:localFilePath]) { - completion(@[@"file-not-found", @"The local file specified does not exist on the device."], nil); - } else { - dispatch_async(dispatch_get_main_queue(), ^{ - completion(nil, [RCTConvert UIImage:localFilePath]); - }); - } -} - -@end diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h deleted file mode 100644 index 1a4c094bfd..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.h +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import -#import -#import - -@interface RNFBMLVisionDocumentTextRecognizerModule : NSObject - -@end \ No newline at end of file diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m deleted file mode 100644 index 891c3d334e..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.m +++ /dev/null @@ -1,149 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
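The `RNFBMLVisionCommon` helpers removed above fix the wire format for geometry: `rectToIntArray` sends rectangles as `[left, top, right, bottom]` and the point helpers send `[x, y]` pairs. A hypothetical JavaScript consumer (the helper names here are ours, not the package's) would unpack them like so:

```js
// Hypothetical helpers mirroring rectToIntArray / arrayForCGPoint above.
function unpackBoundingBox([left, top, right, bottom]) {
  return { left, top, width: right - left, height: bottom - top };
}

function unpackPoints(points) {
  // e.g. a cornerPoints value such as [[10, 20], [110, 20], [110, 120], [10, 120]]
  return points.map(([x, y]) => ({ x, y }));
}
```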
- * - */ - -#import -#import -#import "RNFBMLVisionFaceDetectorModule.h" -#import "RNFBMLVisionCommon.h" - -@implementation RNFBMLVisionFaceDetectorModule -#pragma mark - -#pragma mark Module Setup - -RCT_EXPORT_MODULE(); - -#pragma mark - -#pragma mark Firebase ML Kit Vision Methods - -RCT_EXPORT_METHOD(faceDetectorProcessImage: - (FIRApp *) firebaseApp - : (NSString *)filePath - : (NSDictionary *)faceDetectorOptions - : (RCTPromiseResolveBlock)resolve - : (RCTPromiseRejectBlock)reject -) { - [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) { - if (errorCodeMessageArray != nil) { - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": errorCodeMessageArray[0], - @"message": errorCodeMessageArray[1], - }]; - return; - } - - FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image]; - FIRVision *vision = [FIRVision visionForApp:firebaseApp]; - - FIRVisionFaceDetectorOptions *options = [[FIRVisionFaceDetectorOptions alloc] init]; - - NSInteger *classificationMode = [faceDetectorOptions[@"classificationMode"] integerValue]; - if (classificationMode == 1) { - options.classificationMode = FIRVisionFaceDetectorClassificationModeNone; - } else if (classificationMode == 2) { - options.classificationMode = FIRVisionFaceDetectorClassificationModeAll; - } - - NSInteger *contourMode = [faceDetectorOptions[@"contourMode"] integerValue]; - if (contourMode == 1) { - options.contourMode = FIRVisionFaceDetectorContourModeNone; - } else if (contourMode == 2) { - options.contourMode = FIRVisionFaceDetectorContourModeAll; - } - - NSInteger *landmarkMode = [faceDetectorOptions[@"landmarkMode"] integerValue]; - if (landmarkMode == 1) { - options.landmarkMode = FIRVisionFaceDetectorLandmarkModeNone; - } else if (landmarkMode == 2) { - options.landmarkMode = FIRVisionFaceDetectorLandmarkModeAll; - } - - NSInteger *performanceMode = [faceDetectorOptions[@"performanceMode"] integerValue]; - if (performanceMode == 1) { - options.performanceMode = FIRVisionFaceDetectorPerformanceModeFast; - } else if (performanceMode == 2) { - options.performanceMode = FIRVisionFaceDetectorPerformanceModeAccurate; - } - - options.minFaceSize = [faceDetectorOptions[@"minFaceSize"] doubleValue]; - - FIRVisionFaceDetector *faceDetector = [vision faceDetectorWithOptions:options]; - [faceDetector processImage:visionImage completion:^(NSArray *faces, NSError *error) { - if (error != nil) { - [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{ - @"code": @"unknown", - @"message": [error localizedDescription], - }]; - return; - } - - NSMutableArray *facesFormatted = [[NSMutableArray alloc] init]; - - for (FIRVisionFace *face in faces) { - NSMutableDictionary *visionFace = [[NSMutableDictionary alloc] init]; - - visionFace[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:face.frame]; - - visionFace[@"headEulerAngleY"] = face.hasHeadEulerAngleY ? @(face.headEulerAngleY) : @(-1); - visionFace[@"headEulerAngleZ"] = face.hasHeadEulerAngleZ ? @(face.headEulerAngleZ) : @(-1); - visionFace[@"leftEyeOpenProbability"] = face.hasLeftEyeOpenProbability ? @(face.leftEyeOpenProbability) : @(-1); - visionFace[@"rightEyeOpenProbability"] = face.hasRightEyeOpenProbability ? @(face.rightEyeOpenProbability) : @(-1); - visionFace[@"smilingProbability"] = face.hasSmilingProbability ? @(face.smilingProbability) : @(-1); - visionFace[@"trackingId"] = face.hasTrackingID ? 
@(face.trackingID) : @(-1); - - // Contours - NSMutableArray *faceContours = [[NSMutableArray alloc] init]; - if (contourMode == (NSInteger *) 2) { - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeAll]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeFace]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEyebrowTop]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEyebrowBottom]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEyebrowTop]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEyebrowBottom]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLeftEye]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeRightEye]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeUpperLipTop]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeUpperLipBottom]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLowerLipTop]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeLowerLipBottom]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeNoseBridge]]]; - [faceContours addObject:[RNFBMLVisionCommon contourToDict:[face contourOfType:FIRFaceContourTypeNoseBottom]]]; - } - visionFace[@"faceContours"] = faceContours; - - // Face Landmarks - NSMutableArray *faceLandmarks = [[NSMutableArray alloc] init]; - if (landmarkMode == (NSInteger *) 2) { - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthBottom]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthRight]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeMouthLeft]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeRightEye]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeLeftEye]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeRightCheek]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeLeftCheek]]]; - [faceLandmarks addObject:[RNFBMLVisionCommon landmarkToDict:[face landmarkOfType:FIRFaceLandmarkTypeNoseBase]]]; - } - visionFace[@"landmarks"] = faceLandmarks; - - [facesFormatted addObject:visionFace]; - } - - resolve(facesFormatted); - }]; - }]; -} - -@end diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h b/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h deleted file mode 100644 index 29a772cf6f..0000000000 --- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.h +++ /dev/null @@ -1,24 +0,0 @@ -/** - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
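The option parsing in the face detector module removed above shows the bridge expected plain integers (1 = none/fast, 2 = all/accurate), `minFaceSize` as a fraction of the image, and `-1` as the sentinel for probabilities the detector could not compute. A sketch using those raw values; the public JS layer presumably wrapped them in named constants not shown in this diff, and the threshold below is illustrative:

```js
import vision from '@react-native-firebase/ml-vision';

async function detectFaces(localFilePath) {
  const faces = await vision().faceDetectorProcessImage(localFilePath, {
    classificationMode: 2, // 2 = all: populate smiling/eye-open probabilities
    contourMode: 2,        // 2 = all: populate faceContours
    landmarkMode: 2,       // 2 = all: populate landmarks
    performanceMode: 2,    // 2 = accurate, 1 = fast
    minFaceSize: 0.1,      // illustrative: ignore faces under 10% of image size
  });

  // -1 means "not computed", so filter it out before using the probability.
  return faces.filter(face => face.smilingProbability >= 0);
}
```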
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -#import -#import -#import - -@interface RNFBMLVisionTextRecognizerModule : NSObject - -@end \ No newline at end of file diff --git a/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts b/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts deleted file mode 100644 index 8e0cce321f..0000000000 --- a/packages/ml-vision/lib/BarcodeDetectorTypes.d.ts +++ /dev/null @@ -1,1029 +0,0 @@ -import { FirebaseVisionTypes } from '.'; - -/** - * Firebase ML Kit package for React Native. - * - * #### Example 1 - * - * Access the firebase export from the `ml-vision` package: - * - * ```js - * import { firebase } from '@react-native-firebase/ml-vision'; - * - * // firebase.vision().X - * ``` - * - * #### Example 2 - * - * Using the default export from the `ml-vision` package: - * - * ```js - * import vision from '@react-native-firebase/ml-vision'; - * - * // vision().X - * ``` - * - * #### Example 3 - * - * Using the default export from the `app` package: - * - * ```js - * import firebase from '@react-native-firebase/app'; - * import '@react-native-firebase/ml-vision'; - * - * // firebase.vision().X - * ``` - * - * @firebase ml-vision - */ -export namespace MLKitVision { - /** - * A representation of a barcode detected in an image. - * - * #### Example - * - * ```js - * const [barcode, ...otherBarcodes] = await firebase.vision().barcodeDetectorProcessImage(filePath); - * console.log(barcode); - * ``` - */ - export interface VisionBarcode { - /** - * Returns the bounding rectangle of the detected barcode. - */ - boundingBox: FirebaseVisionTypes.VisionRectangle; - - /** - * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image. - */ - cornerPoints: FirebaseVisionTypes.VisionPoint[]; - - /** - * Returns the barcode format, for example `VisionBarcodeFormat.QR_CODE`. - * - * Use with `VisionBarcodeFormat` to switch based on format if needed. - */ - format: number; - - /** - * Returns the type of the barcode value, for example `VisionBarcodeValueType.EMAIL`. - * - * If the value structure cannot be parsed, `VisionBarcodeValueType.TEXT` will be returned. - * If the recognized structure type is not defined in the current version of the native Firebase SDKs, `VisionBarcodeValueType.UNKNOWN` will be returned. - * - * Note that the built-in parsers only recognize a few popular value structures. For your specific use case, you might want to directly consume `rawValue` and implement your own parsing logic. - */ - valueType: number; - - /** - * Returns the barcode value in a user-friendly format. - * - * May omit some of the information encoded in the barcode. For example, if `rawValue` returns `'MEBKM:TITLE:Invertase;URL://invertase.io;;'`, the `displayValue` might be `'//invertase.io'`. - * - * If `valueType` === `VisionBarcodeValueType.TEXT`, this field will be identical to `rawValue`. - * - * This value can also be multiline, for example, when line breaks are encoded into the original `TEXT` barcode value. - * - * Returns `null` if nothing found.
- */ - displayValue: string | null; - - /** - * Returns barcode value as it was encoded in the barcode. - * - * Structured values are not parsed. - * - * Returns `null` if nothing found. - */ - rawValue: string | null; - - /** - * Gets parsed calendar event (set if `valueType` is `VisionBarcodeValueType.CALENDAR_EVENT`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) { - * console.log(barcode.calendarEvent); - * } - * ``` - */ - calendarEvent?: VisionBarcodeCalendarEvent; - - /** - * Gets parsed contact details (set if `valueType` is `VisionBarcodeValueType.CONTACT_INFO`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo); - * } - * ``` - */ - contactInfo?: VisionBarcodeContactInfo; - - /** - * Gets parsed drivers license details (set if `valueType` is `VisionBarcodeValueType.DRIVER_LICENSE`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) { - * console.log(barcode.driverLicense); - * } - * ``` - */ - driverLicense?: VisionBarcodeDriverLicense; - - /** - * Gets parsed email details (set if `valueType` is `VisionBarcodeValueType.EMAIL`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.EMAIL) { - * console.log(barcode.email); - * } - * ``` - */ - email?: VisionBarcodeEmail; - - /** - * Gets parsed Geo Point details (set if `valueType` is `VisionBarcodeValueType.GEO`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.GEO) { - * console.log(barcode.geoPoint); - * } - * ``` - */ - geoPoint?: FirebaseVisionTypes.VisionGeoPoint; - - /** - * Gets parsed phone details (set if `valueType` is `VisionBarcodeValueType.PHONE`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.PHONE) { - * console.log(barcode.phone); - * } - * ``` - */ - phone?: VisionBarcodePhone; - - /** - * Gets parsed sms details (set if `valueType` is `VisionBarcodeValueType.SMS`). 
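The optional fields documented above and below are only populated for the matching `valueType`, so consumers dispatch on it before touching a payload. A minimal sketch consolidating the per-field examples:

```js
import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';

async function barcodePayload(filePath) {
  const [barcode] = await vision().barcodeDetectorProcessImage(filePath);
  if (!barcode) return null;

  switch (barcode.valueType) {
    case VisionBarcodeValueType.CALENDAR_EVENT: return barcode.calendarEvent;
    case VisionBarcodeValueType.CONTACT_INFO:   return barcode.contactInfo;
    case VisionBarcodeValueType.DRIVER_LICENSE: return barcode.driverLicense;
    case VisionBarcodeValueType.EMAIL:          return barcode.email;
    case VisionBarcodeValueType.GEO:            return barcode.geoPoint; // [latitude, longitude]
    case VisionBarcodeValueType.PHONE:          return barcode.phone;
    default:                                    return barcode.displayValue;
  }
}
```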
- * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.SMS) { - * console.log(barcode.sms); - * } - * ``` - */ - sms?: VisionBarcodeSms; - - /** - * Gets parsed url details (set if `valueType` is `VisionBarcodeValueType.URL`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.URL) { - * console.log(barcode.url); - * } - * ``` - */ - url?: VisionBarcodeUrl; - - /** - * Gets parsed wifi details (set if `valueType` is `VisionBarcodeValueType.WIFI`). - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.WIFI) { - * console.log(barcode.wifi); - * } - * ``` - */ - wifi?: VisionBarcodeWifi; - } - - /** - * Wifi network parameters from a 'WIFI:' or similar QRCode type. - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.WIFI) { - * console.log(barcode.wifi); - * } - * ``` - */ - export interface VisionBarcodeWifi { - /** - * The encryption type of the WIFI. e.g. `VisionBarcodeWifiEncryptionType.WPA` - * - * See all types at `VisionBarcodeWifiEncryptionType`. - */ - encryptionType: number; - - /** - * The password for this WIFI. - * - * Returns `null` if nothing found. - */ - password: string | null; - - /** - * The SSID for this WIFI. - * - * Returns `null` if nothing found. - */ - ssid: string | null; - } - - /** - * A URL and title from a 'MEBKM:' or similar QRCode type. - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.URL) { - * console.log(barcode.url); - * } - * ``` - */ - export interface VisionBarcodeUrl { - /** - * The title for this url. - * - * Returns `null` if nothing found. - */ - title: string | null; - - /** - * The URL. - * - * Returns `null` if nothing found. - */ - url: string | null; - } - - /** - * An SMS message from an 'SMS:' or similar QRCode type. - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.SMS) { - * console.log(barcode.sms); - * } - * ``` - */ - export interface VisionBarcodeSms { - /** - * The message text for this SMS. - * - * Returns `null` if nothing found. - */ - message: string | null; - - /** - * The phone number for this SMS. - * - * Returns `null` if nothing found. - */ - phoneNumber: string | null; - } - - /** - * A driver license or ID card.
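Tying `VisionBarcodeWifi` above to its encryption constants (`OPEN: 1`, `WPA: 2`, `WEP: 3`, per the `VisionBarcodeWifiEncryptionType` enum further down), a guarded read might look like the following sketch; the mapping table and helper name are illustrative:

```js
import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';

// Keys mirror VisionBarcodeWifiEncryptionType: OPEN = 1, WPA = 2, WEP = 3.
const ENCRYPTION_NAMES = { 1: 'open', 2: 'wpa', 3: 'wep' };

async function readWifiQr(filePath) {
  const [barcode] = await vision().barcodeDetectorProcessImage(filePath);
  if (!barcode || barcode.valueType !== VisionBarcodeValueType.WIFI) return null;

  const { ssid, password, encryptionType } = barcode.wifi;
  return { ssid, password, encryption: ENCRYPTION_NAMES[encryptionType] || 'unknown' };
}
```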
- * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.DRIVER_LICENSE) { - * console.log(barcode.driverLicense); - * } - * ``` - */ - export interface VisionBarcodeDriverLicense { - /** - * Gets city of holder's address. - * - * Returns `null` if nothing found. - */ - addressCity: string | null; - - /** - * Gets state of holder's address. - * - * Returns `null` if nothing found. - */ - addressState: string | null; - - /** - * The holder's street address. - * - * Returns `null` if nothing found. - */ - addressStreet: string | null; - - /** - * The zip code of holder's address. - * - * Returns `null` if nothing found. - */ - addressZip: string | null; - - /** - * The birth date of the holder. - * - * Returns `null` if nothing found. - */ - birthDate: string | null; - - /** - * The "DL" for driver licenses, "ID" for ID cards. - * - * Returns `null` if nothing found. - */ - documentType: string | null; - - /** - * The expiry date of the license. - * - * Returns `null` if nothing found. - */ - expiryDate: string | null; - - /** - * The holder's first name. - * - * Returns `null` if nothing found. - */ - firstName: string | null; - - /** - * The holder's gender. - * - * Returns `null` if nothing found. - */ - gender: string | null; - - /** - * The issue date of the license. - * - * Returns `null` if nothing found. - */ - issueDate: string | null; - - /** - * The country in which DL/ID was issued. - * - * Returns `null` if nothing found. - */ - issuingCountry: string | null; - - /** - * The holder's last name. - * - * Returns `null` if nothing found. - */ - lastName: string | null; - - /** - * The driver license ID number. - * - * Returns `null` if nothing found. - */ - licenseNumber: string | null; - - /** - * The holder's middle name. - * - * Returns `null` if nothing found. - */ - middleName: string | null; - } - - /** - * A calendar event extracted from QRCode. - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) { - * console.log(barcode.calendarEvent); - * } - * ``` - */ - export interface VisionBarcodeCalendarEvent { - /** - * The description of the calendar event. - * - * Returns `null` if nothing found. - */ - description: string | null; - - /** - * The end date time of the calendar event. - * - * Returns `null` if nothing found. - */ - end: string | null; - - /** - * The location of the calendar event. - * - * Returns `null` if nothing found. - */ - location: string | null; - - /** - * The organizer of the calendar event. - * - * Returns `null` if nothing found. - */ - organizer: string | null; - - /** - * The start date time of the calendar event. - * - * Returns `null` if nothing found. - */ - start: string | null; - - /** - * The status of the calendar event. - * - * Returns `null` if nothing found. - */ - status: string | null; - - /** - * The summary of the calendar event. - * - * Returns `null` if nothing found. - */ - summary: string | null; - } - - /** - * A persons or organization's business card. For example a VCARD. 
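Every field on `VisionBarcodeDriverLicense` above is a nullable string, including the dates, so consumers treat the record defensively. For illustration (helper name ours):

```js
import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision';

async function readDriverLicense(filePath) {
  const [barcode] = await vision().barcodeDetectorProcessImage(filePath);
  if (!barcode || barcode.valueType !== VisionBarcodeValueType.DRIVER_LICENSE) {
    return null;
  }

  const { documentType, firstName, lastName, licenseNumber, expiryDate } = barcode.driverLicense;
  return {
    isIdCard: documentType === 'ID', // "DL" = driver license, "ID" = ID card
    holder: [firstName, lastName].filter(Boolean).join(' ') || null,
    licenseNumber,
    expiryDate,
  };
}
```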
- * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo); - * } - * ``` - */ - export interface VisionBarcodeContactInfo { - /** - * Gets an array of detected urls for the contact. - * - * Returns an empty array if nothing found. - */ - urls: string[]; - - /** - * Gets the contact person's title. E.g. `Dr` - * - * Returns `null` if no title detected. - */ - title: string | null; - - /** - * Gets the contact person's organization. - * - * Returns `null` if no organization detected. - */ - organization: string | null; - - /** - * Gets the contact person's phones. - * - * Returns an empty array if nothing found. - */ - phones: VisionBarcodePhone[]; - - /** - * Gets the contact person's emails. - * - * Returns an empty array if nothing found. - */ - emails: VisionBarcodeEmail[]; - - /** - * Gets the contact person's name. - */ - name: VisionBarcodePersonName; - - /** - * Gets an array of the contact person's addresses. - * - * Returns an empty array if nothing found. - */ - addresses: VisionBarcodeAddress[]; - } - - /** - * A contact's address. - */ - export interface VisionBarcodeAddress { - /** - * An array of address line strings of the formatted address. - */ - lines: string[]; - - /** - * The address type, e.g. `VisionBarcodeAddressType.WORK`. - */ - type: number; - } - - /** - * A person's name, both the formatted version and the individual name components. - */ - export interface VisionBarcodePersonName { - /** - * The person's first name. - * - * Returns `null` if not found. - */ - first: string | null; - - /** - * A properly formatted name. - * - * Returns `null` if no name components found. - */ - formatted: string | null; - - /** - * The person's last name. - * - * Returns `null` if not found. - */ - last: string | null; - - /** - * The person's middle name. - * - * Returns `null` if not found. - */ - middle: string | null; - - /** - * The prefix of the name. - * - * Returns `null` if not found. - */ - prefix: string | null; - - /** - * Designates a text string to be set as the kana name in the phonebook. - */ - pronunciation: string | null; - - /** - * The suffix of the person's name. - * - * Returns `null` if not found. - */ - suffix: string | null; - } - - /** - * An email message from a 'MAILTO:' or similar QRCode type, or from a ContactInfo/VCARD. - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.EMAIL) { - * console.log(barcode.email); - * } else if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo.emails[0]); - * } - * ``` - */ - export interface VisionBarcodeEmail { - /** - * The email address. - * - * Returns `null` if none detected. - */ - address: string | null; - - /** - * The email body content. - * - * Returns `null` if no body detected. - */ - body: string | null; - - /** - * The email subject. - * - * Returns `null` if no subject was detected. - */ - subject: string | null; - } - - /** - * A phone number and its detected type, e.g.
`VisionBarcodePhoneType.MOBILE` - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.PHONE) { - * console.log(barcode.phone); - * } else if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo.phones[0]); - * } - * ``` - */ - export interface VisionBarcodePhone { - /** - * The detected phone number. - * - * Returns `null` if no number detected for this type. - */ - number: string | null; - - /** - * Gets type of the phone number, e.g. `VisionBarcodePhoneType.MOBILE`. - * - * See also `VisionBarcodePhoneType`. - */ - type: number; - } - - /** - * Custom options for barcode detection. - * - * #### Example - * - * ```js - * import vision, { VisionBarcodeFormat, VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath, { - * barcodeFormats: [VisionBarcodeFormat.QR_CODE] - * }); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo); - * } - * ``` - */ - export interface VisionBarcodeDetectorOptions { - /** - * Set the barcode formats to detect. - * - * Defaults to `VisionBarcodeFormat.ALL_FORMATS`; - * - * @param formats Array of `VisionBarcodeFormat` types. - */ - barcodeFormats?: VisionBarcodeFormat[]; - } - - /** - * Barcode format constants - enumeration of supported barcode formats. - * - * Can be used to specify the known type of a barcode before processing; via `VisionBarcodeDetectorOptions.setBarcodeFormats()` - */ - export enum VisionBarcodeFormat { - /** - * Barcode format constant representing the union of all supported formats. - */ - ALL_FORMATS = 0, - - /** - * Barcode format constant for AZTEC. - */ - AZTEC = 4096, - - /** - * Barcode format constant for Codabar. - */ - CODABAR = 8, - - /** - * Barcode format constant for Code 128. - */ - CODE_128 = 1, - - /** - * Barcode format constant for Code 39. - */ - CODE_39 = 2, - - /** - * Barcode format constant for Code 93. - */ - CODE_93 = 4, - - /** - * Barcode format constant for Data Matrix. - */ - DATA_MATRIX = 16, - - /** - * Barcode format constant for EAN-13. - */ - EAN_13 = 32, - - /** - * Barcode format constant for EAN-8. - */ - EAN_8 = 64, - - /** - * Barcode format constant for ITF (Interleaved Two-of-Five). - */ - ITF = 128, - - /** - * Barcode format constant for PDF-417. - */ - PDF417 = 2048, - - /** - * Barcode format constant for QR Code. - */ - QR_CODE = 256, - - /** - * Barcode format unknown to the current SDK, but understood by Google Play services. - */ - UNKNOWN = -1, - - /** - * Barcode format constant for UPC-A. - */ - UPC_A = 512, - - /** - * Barcode format constant for UPC-E. - */ - UPC_E = 1024, - } - - /** - * Barcode value type constants - enumeration of supported barcode content value types. - * - * Can be used with `VisionBarcode.valueType` to determine the barcode content type of a detected barcode. 
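As the `VisionBarcodeDetectorOptions` docs above show (and as the Objective-C earlier in this diff confirms by OR-ing the values together on the native side), detection can be narrowed to specific formats:

```js
import vision, { VisionBarcodeFormat } from '@react-native-firebase/ml-vision';

// Only QR codes and EAN-13 are reported; all other symbologies are skipped.
function scanQrAndEan(filePath) {
  return vision().barcodeDetectorProcessImage(filePath, {
    barcodeFormats: [VisionBarcodeFormat.QR_CODE, VisionBarcodeFormat.EAN_13],
  });
}
```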
- * - * #### Example - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * // check for a calendar event barcode value type - * if (barcode && barcode.valueType === VisionBarcodeValueType.CALENDAR_EVENT) { - * console.log(barcode.calendarEvent); - * } - * ``` - */ - export enum VisionBarcodeValueType { - /** - * Barcode value type constant for calendar events. - */ - CALENDAR_EVENT = 11, - - /** - * Barcode value type constant for contact information. - */ - CONTACT_INFO = 1, - - /** - * Barcode value type constant for driver's license data. - */ - DRIVER_LICENSE = 12, - - /** - * Barcode value type constant for email message details. - */ - EMAIL = 2, - - /** - * Barcode value type constant for geographic coordinates. - */ - GEO = 10, - - /** - * Barcode value type constant for ISBNs. - */ - ISBN = 3, - - /** - * Barcode value type constant for phone numbers. - */ - PHONE = 4, - - /** - * Barcode value type constant for product codes. - */ - PRODUCT = 5, - - /** - * Barcode value type constant for SMS details. - */ - SMS = 6, - - /** - * Barcode value type constant for plain text. - */ - TEXT = 7, - - /** - * Barcode value type unknown, which indicates the current version of SDK cannot recognize the structure of the barcode. - */ - UNKNOWN = 0, - - /** - * Barcode value type constant for URLs/bookmarks. - */ - URL = 8, - - /** - * Barcode value type constant for WiFi access point details. - */ - WIFI = 9, - } - - /** - * The type of a address detected in a barcode. - * - * Use with `VisionBarcodeAddress.type`. - */ - export enum VisionBarcodeAddressType { - /** - * Unknown type - */ - UNKNOWN = 0, - - /** - * Address is specified as a WORK address. - */ - WORK = 1, - - /** - * Address is specified as a HOME address. - */ - HOME = 2, - } - - /** - * The type of an email detected in a barcode. - * - * Use with `VisionBarcodeEmail.type`. - */ - export enum VisionBarcodeEmailType { - /** - * Unknown type - */ - UNKNOWN = 0, - - /** - * Email address is specified as a WORK email. - */ - WORK = 1, - - /** - * Email address is specified as a HOME / personal email. - */ - HOME = 2, - } - - /** - * The type of a phone number detected in a barcode. - * - * Use with `VisionBarcodePhone.type`. - */ - export enum VisionBarcodePhoneType { - /** - * Fax machine. - */ - FAX = 3, - - /** - * Home phone. - */ - HOME = 2, - - /** - * Mobile Phone. - */ - MOBILE = 4, - - /** - * Unknown type. - */ - UNKNOWN = 0, - - /** - * Work phone. - */ - WORK = 1, - } - - /** - * The type of wifi encryption used for a `VisionBarcodeWifi` instance. - * - * Use with `VisionBarcodeWifi.encryptionType`. - */ - export enum VisionBarcodeWifiEncryptionType { - /** - * Wifi has no encryption and is open. - */ - OPEN = 1, - - /** - * Wifi uses WPA encryption. This includes WPA2. - */ - WPA = 2, - - /** - * Wifi uses WEP encryption. - */ - WEP = 3, - } -} diff --git a/packages/ml-vision/lib/VisionBarcodeAddressType.js b/packages/ml-vision/lib/VisionBarcodeAddressType.js deleted file mode 100644 index 5d4971f5aa..0000000000 --- a/packages/ml-vision/lib/VisionBarcodeAddressType.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - UNKNOWN: 0, - WORK: 1, - HOME: 2, -}; diff --git a/packages/ml-vision/lib/VisionBarcodeEmailType.js b/packages/ml-vision/lib/VisionBarcodeEmailType.js deleted file mode 100644 index 5d4971f5aa..0000000000 --- a/packages/ml-vision/lib/VisionBarcodeEmailType.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - UNKNOWN: 0, - WORK: 1, - HOME: 2, -}; diff --git a/packages/ml-vision/lib/VisionBarcodeFormat.js b/packages/ml-vision/lib/VisionBarcodeFormat.js deleted file mode 100644 index 70a0887252..0000000000 --- a/packages/ml-vision/lib/VisionBarcodeFormat.js +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - /** - * Barcode format constant representing the union of all supported formats. - */ - ALL_FORMATS: 0, - /** - * Barcode format constant for AZTEC. - */ - AZTEC: 4096, - /** - * Barcode format constant for Codabar. - */ - CODABAR: 8, - /** - * Barcode format constant for Code 128. - */ - CODE_128: 1, - /** - * Barcode format constant for Code 39. - */ - CODE_39: 2, - /** - * Barcode format constant for Code 93. - */ - CODE_93: 4, - /** - * Barcode format constant for Data Matrix. - */ - DATA_MATRIX: 16, - /** - * Barcode format constant for EAN-13. - */ - EAN_13: 32, - /** - * Barcode format constant for EAN-8. - */ - EAN_8: 64, - /** - * Barcode format constant for ITF (Interleaved Two-of-Five). - */ - ITF: 128, - /** - * Barcode format constant for PDF-417. - */ - PDF417: 2048, - /** - * Barcode format constant for QR Code. - */ - QR_CODE: 256, - /** - * Barcode format unknown to the current SDK, but understood by Google Play services. - */ - UNKNOWN: -1, - /** - * Barcode format constant for UPC-A. - */ - UPC_A: 512, - /** - * Barcode format constant for UPC-E. 
- */ - UPC_E: 1024, -}; diff --git a/packages/ml-vision/lib/VisionBarcodePhoneType.js b/packages/ml-vision/lib/VisionBarcodePhoneType.js deleted file mode 100644 index d63a55afc8..0000000000 --- a/packages/ml-vision/lib/VisionBarcodePhoneType.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - FAX: 3, - HOME: 2, - MOBILE: 4, - UNKNOWN: 0, - WORK: 1, -}; diff --git a/packages/ml-vision/lib/VisionBarcodeValueType.js b/packages/ml-vision/lib/VisionBarcodeValueType.js deleted file mode 100644 index 4b81f202f8..0000000000 --- a/packages/ml-vision/lib/VisionBarcodeValueType.js +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - /** - * Barcode value type constant for calendar events. - */ - CALENDAR_EVENT: 11, - - /** - * Barcode value type constant for contact information. - */ - CONTACT_INFO: 1, - - /** - * Barcode value type constant for driver's license data. - */ - DRIVER_LICENSE: 12, - - /** - * Barcode value type constant for email message details. - */ - EMAIL: 2, - - /** - * Barcode value type constant for geographic coordinates. - */ - GEO: 10, - - /** - * Barcode value type constant for ISBNs. - */ - ISBN: 3, - - /** - * Barcode value type constant for phone numbers. - */ - PHONE: 4, - - /** - * Barcode value type constant for product codes. - */ - PRODUCT: 5, - - /** - * Barcode value type constant for SMS details. - */ - SMS: 6, - - /** - * Barcode value type constant for plain text. - */ - TEXT: 7, - - /** - * Barcode value type unknown, which indicates the current version of SDK cannot recognize the structure of the barcode. - */ - UNKNOWN: 0, - - /** - * Barcode value type constant for URLs/bookmarks. - */ - URL: 8, - - /** - * Barcode value type constant for WiFi access point details. - */ - WIFI: 9, -}; diff --git a/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js b/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js deleted file mode 100644 index a2312fa078..0000000000 --- a/packages/ml-vision/lib/VisionBarcodeWifiEncryptionType.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - OPEN: 1, - WPA: 2, - WEP: 3, -}; diff --git a/packages/ml-vision/lib/VisionFaceContourType.js b/packages/ml-vision/lib/VisionFaceContourType.js deleted file mode 100644 index cdab469370..0000000000 --- a/packages/ml-vision/lib/VisionFaceContourType.js +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - ALL_POINTS: 1, - FACE: 2, - LEFT_EYE: 7, - LEFT_EYEBROW_BOTTOM: 4, - LEFT_EYEBROW_TOP: 3, - LOWER_LIP_BOTTOM: 12, - LOWER_LIP_TOP: 11, - NOSE_BOTTOM: 14, - NOSE_BRIDGE: 13, - RIGHT_EYE: 8, - RIGHT_EYEBROW_BOTTOM: 6, - RIGHT_EYEBROW_TOP: 5, - UPPER_LIP_BOTTOM: 10, - UPPER_LIP_TOP: 9, -}; diff --git a/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js b/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js deleted file mode 100644 index c4770ed952..0000000000 --- a/packages/ml-vision/lib/VisionFaceDetectorClassificationMode.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - NO_CLASSIFICATIONS: 1, - ALL_CLASSIFICATIONS: 2, -}; diff --git a/packages/ml-vision/lib/VisionFaceDetectorContourMode.js b/packages/ml-vision/lib/VisionFaceDetectorContourMode.js deleted file mode 100644 index 6f2ac438bf..0000000000 --- a/packages/ml-vision/lib/VisionFaceDetectorContourMode.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - NO_CONTOURS: 1, - ALL_CONTOURS: 2, -}; diff --git a/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js b/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js deleted file mode 100644 index 0bdc0bf212..0000000000 --- a/packages/ml-vision/lib/VisionFaceDetectorLandmarkMode.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - NO_LANDMARKS: 1, - ALL_LANDMARKS: 2, -}; diff --git a/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js b/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js deleted file mode 100644 index 0d2a1aa6ac..0000000000 --- a/packages/ml-vision/lib/VisionFaceDetectorPerformanceMode.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -export default { - FAST: 1, - ACCURATE: 2, -}; diff --git a/packages/ml-vision/lib/VisionFaceLandmarkType.js b/packages/ml-vision/lib/VisionFaceLandmarkType.js deleted file mode 100644 index b4b13dbc81..0000000000 --- a/packages/ml-vision/lib/VisionFaceLandmarkType.js +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
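The four face-detector mode constants deleted here map one-to-one onto the keys of `VisionFaceDetectorOptions` documented later in this diff. A minimal sketch combining them for a one-off, accuracy-sensitive pass; each `ALL_*` mode and the `ACCURATE` setting increases processing time, so this configuration suits single images rather than video frames:

```js
import vision, {
  VisionFaceDetectorClassificationMode,
  VisionFaceDetectorContourMode,
  VisionFaceDetectorLandmarkMode,
  VisionFaceDetectorPerformanceMode,
} from '@react-native-firebase/ml-vision';

// Enable every collection mode for maximum detail per face.
async function detailedFaceScan(filePath) {
  return vision().faceDetectorProcessImage(filePath, {
    classificationMode: VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS,
    contourMode: VisionFaceDetectorContourMode.ALL_CONTOURS,
    landmarkMode: VisionFaceDetectorLandmarkMode.ALL_LANDMARKS,
    performanceMode: VisionFaceDetectorPerformanceMode.ACCURATE,
    minFaceSize: 0.15, // ignore faces narrower than ~15% of the image width
  });
}
```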
- * - */ - -export default { - LEFT_CHEEK: 1, - LEFT_EAR: 3, - LEFT_EYE: 4, - MOUTH_BOTTOM: 0, - MOUTH_LEFT: 5, - MOUTH_RIGHT: 11, - NOSE_BASE: 6, - RIGHT_CHEEK: 7, - RIGHT_EAR: 9, - RIGHT_EYE: 10, -}; diff --git a/packages/ml-vision/lib/VisionPoint.js b/packages/ml-vision/lib/VisionPoint.js deleted file mode 100644 index 1e55ef745e..0000000000 --- a/packages/ml-vision/lib/VisionPoint.js +++ /dev/null @@ -1,83 +0,0 @@ -// TODO introduce in a later release if required -// /* eslint-disable no-bitwise */ -// -// /* -// * Copyright (c) 2016-present Invertase Limited & Contributors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this library except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. -// * -// */ -// -// export default class VisionPoint { -// constructor(x, y) { -// this._x = x || 0; -// this._y = y || 0; -// } -// -// /** -// * Set the point's x and y coordinates -// * -// * @param x -// * @param y -// */ -// set(x, y) { -// // todo arg validate number for all args -// this._x = x; -// this._y = y; -// } -// -// /** -// * Copy the coordinates from the source point into this point. -// * -// * @param otherPoint VisionPoint -// */ -// setFromPoint(otherPoint) { -// // todo arg instance of VisionPoint check -// this.set(otherPoint.x, otherPoint.y); -// } -// -// get x() { -// return this._x; -// } -// -// get y() { -// return this._y; -// } -// -// /** -// * Returns true if this VisionPoint has the same coordinates as the specified VisionPoint. -// * -// * @param otherPoint -// * @returns {boolean} -// */ -// isEqual(otherPoint) { -// // todo arg instance of VisionPoint check -// return this.toString() === otherPoint.toString(); -// } -// -// /** -// * Returns this point as an array of [x, y] -// * @returns {*[]} -// */ -// toArray() { -// return [this.x, this.y]; -// } -// -// /** -// * Returns this point as an string, e.g VisionPoint[x, y] -// * @returns {string} -// */ -// toString() { -// return `Point[${this.x}, ${this.y}]`; -// } -// } diff --git a/packages/ml-vision/lib/VisionRectangle.js b/packages/ml-vision/lib/VisionRectangle.js deleted file mode 100644 index 8e781805bb..0000000000 --- a/packages/ml-vision/lib/VisionRectangle.js +++ /dev/null @@ -1,206 +0,0 @@ -// TODO introduce in a later release if required -// /* eslint-disable no-bitwise */ -// -// /* -// * Copyright (c) 2016-present Invertase Limited & Contributors -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this library except in compliance with the License. -// * You may obtain a copy of the License at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// * -// */ -// -// export default class VisionRectangle { -// /** -// * -// * @param left -// * @param top -// * @param right -// * @param bottom -// */ -// constructor(left, top, right, bottom) { -// this._left = left || 0; -// this._top = top || 0; -// this._right = right || 0; -// this._bottom = bottom || 0; -// } -// -// /** -// * Set the rectangle's coordinates to the specified values. -// * -// * @param left -// * @param top -// * @param right -// * @param bottom -// */ -// set(left, top, right, bottom) { -// // todo arg validate number for all args -// // todo arg validate left <= right -// // todo arg validate top <= bottom -// this._left = left; -// this._top = top; -// this._right = right; -// this._bottom = bottom; -// } -// -// /** -// * Copy the coordinates from the source rectangle into this rectangle. -// * -// * @param otherRect VisionRectangle -// */ -// setFromRectangle(otherRect) { -// // todo arg instance of VisionRectangle check -// this.set(otherRect.left, otherRect.top, otherRect.right, otherRect.bottom); -// } -// -// get top() { -// return this._top; -// } -// -// get left() { -// return this._left; -// } -// -// get bottom() { -// return this._bottom; -// } -// -// get right() { -// return this._right; -// } -// -// get width() { -// return this._right - this._left; -// } -// -// get height() { -// return this._bottom - this._top; -// } -// -// /** -// * Returns whether the first rectangle contains the second rectangle. -// * @param otherRect VisionRectangle -// * @returns {boolean} -// */ -// containsRectangle(otherRect) { -// // todo arg instance of VisionRectangle check -// return ( -// !this.isEmpty() && -// this.left <= otherRect.left && -// this.top <= otherRect.top && -// this.right >= otherRect.right && -// this.bottom >= otherRect.bottom -// ); -// } -// -// /** -// * Returns whether a rectangle contains a specified point. -// * -// * @param x -// * @param y -// * @returns {boolean} -// */ -// containsPoint(x, y) { -// return !this.isEmpty() && x >= this.left && x < this.right && y >= this.top && y < this.bottom; -// } -// -// /** -// * Returns whether two rectangles intersect. -// * -// * @param otherRect VisionRectangle -// * @returns {boolean} -// */ -// intersectsRectangle(otherRect) { -// // todo arg instance of VisionRectangle check -// return ( -// this.left < otherRect.right && -// otherRect.left < this.right && -// this.top < otherRect.bottom && -// otherRect.top < this.bottom -// ); -// } -// -// /** -// * If the rectangle specified intersects this -// * rectangle, return true and set this rectangle to that intersection, -// * otherwise return false and do not change this rectangle. No check is -// * performed to see if either rectangle is empty. Note: To just test for -// * intersection, use {@link #intersectsRectangle(otherRect: VisionRectangle)}. -// * -// * @param otherRect -// * @returns {boolean} -// */ -// intersectRectangle(otherRect) { -// // todo arg instance of VisionRectangle check -// if ( -// this.left < otherRect.right && -// otherRect.left < this.right && -// this.top < otherRect.bottom && -// otherRect.top < this.bottom -// ) { -// if (this.left < otherRect.left) this._left = otherRect.left; -// if (this.top < otherRect.top) this._top = otherRect.top; -// if (this.right > otherRect.right) this._right = otherRect.right; -// if (this.bottom > otherRect.bottom) this._bottom = otherRect.bottom; -// return true; -// } -// return false; -// } -// -// /** -// * Returns the horizontal center of the rectangle. 
-// */ -// centerX() { -// return (this.left + this.right) >> 1; -// } -// -// /** -// * Returns the vertical center of the rectangle. -// */ -// centerY() { -// return (this.top + this.bottom) >> 1; -// } -// -// /** -// * Returns whether a rectangle has zero width or height -// * @returns {boolean} -// */ -// isEmpty() { -// return this.left >= this.right || this.top >= this.bottom; -// } -// -// /** -// * Returns true if this VisionRectangle has the same bounding box as the specified VisionRectangle. -// * -// * @param otherRect -// * @returns {boolean} -// */ -// isEqual(otherRect) { -// // todo arg instance of VisionPoint check -// return this.toString() === otherRect.toString(); -// } -// -// /** -// * Returns this rectangle as an array of [left, top, right, bottom] -// * @returns {*[]} -// */ -// toArray() { -// return [this.left, this.top, this.right, this.bottom]; -// } -// -// /** -// * Returns this rectangle as an string, e.g VisionRectangle[left, top, right, bottom] -// * @returns {string} -// */ -// toString() { -// return `Rectangle[${this.left}, ${this.top}, ${this.right}, ${this.bottom}]`; -// } -// } diff --git a/packages/ml-vision/lib/index.d.ts b/packages/ml-vision/lib/index.d.ts deleted file mode 100644 index dd18c28578..0000000000 --- a/packages/ml-vision/lib/index.d.ts +++ /dev/null @@ -1,1236 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { ReactNativeFirebase } from '@react-native-firebase/app'; -import { MLKitVision } from './BarcodeDetectorTypes'; -/** - * Firebase ML Kit package for React Native. 
- * - * #### Example 1 - * - * Access the firebase export from the `ml-vision` package: - * - * ```js - * import { firebase } from '@react-native-firebase/ml-vision'; - * - * // firebase.vision().X - * ``` - * - * #### Example 2 - * - * Using the default export from the `ml-vision` package: - * - * ```js - * import vision from '@react-native-firebase/ml-vision'; - * - * // vision().X - * ``` - * - * #### Example 3 - * - * Using the default export from the `app` package: - * - * ```js - * import firebase from '@react-native-firebase/app'; - * import '@react-native-firebase/ml-vision'; - * - * // firebase.vision().X - * ``` - * - * @firebase ml-vision - */ -export namespace FirebaseVisionTypes { - import FirebaseModule = ReactNativeFirebase.FirebaseModule; - - export interface Statics { - VisionCloudTextRecognizerModelType: typeof VisionCloudTextRecognizerModelType; - VisionFaceDetectorClassificationMode: typeof VisionFaceDetectorClassificationMode; - VisionFaceDetectorContourMode: typeof VisionFaceDetectorContourMode; - VisionFaceDetectorLandmarkMode: typeof VisionFaceDetectorLandmarkMode; - VisionFaceDetectorPerformanceMode: typeof VisionFaceDetectorPerformanceMode; - VisionFaceLandmarkType: typeof VisionFaceLandmarkType; - VisionFaceContourType: typeof VisionFaceContourType; - VisionCloudLandmarkRecognizerModelType: typeof VisionCloudLandmarkRecognizerModelType; - VisionDocumentTextRecognizedBreakType: typeof VisionDocumentTextRecognizedBreakType; - VisionBarcodeFormat: typeof MLKitVision.VisionBarcodeFormat; - VisionBarcodeValueType: typeof MLKitVision.VisionBarcodeValueType; - VisionBarcodeAddressType: typeof MLKitVision.VisionBarcodeAddressType; - VisionBarcodeEmailType: typeof MLKitVision.VisionBarcodeEmailType; - VisionBarcodePhoneType: typeof MLKitVision.VisionBarcodePhoneType; - VisionBarcodeWifiEncryptionType: typeof MLKitVision.VisionBarcodeWifiEncryptionType; - } - - /** - * Options for vision face detector. - */ - export interface VisionFaceDetectorOptions { - /** - * Indicates whether to run additional classifiers for characterizing attributes such as "smiling" and "eyes open". - * - * Defaults to `VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS`. - * - * #### Example - * - * ```js - * const faces = await firebase.vision().faceDetectorProcessImage(filePath, { - * classificationMode: VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS, - * }); - * ``` - */ - classificationMode?: - | VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS - | VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS; - - /** - * Sets whether to detect no contours or all contours. Processing time increases as the number of contours to search - * for increases, so detecting all contours will increase the overall detection time. Note that it would return up - * to 5 faces contours. - * - * Defaults to `VisionFaceDetectorContourMode.NO_CONTOURS`. - * - * #### Example - * - * ```js - * const faces = await firebase.vision().faceDetectorProcessImage(filePath, { - * contourMode: VisionFaceDetectorContourMode.ALL_CONTOURS, - * }); - * ``` - */ - contourMode?: - | VisionFaceDetectorContourMode.NO_CONTOURS - | VisionFaceDetectorContourMode.ALL_CONTOURS; - - /** - * Sets whether to detect no landmarks or all landmarks. Processing time increases as the number of landmarks to - * search for increases, so detecting all landmarks will increase the overall detection time. Detecting landmarks - * can improve pose estimation. - * - * Defaults to `VisionFaceDetectorLandmarkMode.NO_LANDMARKS`. 
- * - * #### Example - * - * ```js - * const faces = await firebase.vision().faceDetectorProcessImage(filePath, { - * landmarkMode: VisionFaceDetectorLandmarkMode.ALL_LANDMARKS, - * }); - * ``` - */ - landmarkMode?: - | VisionFaceDetectorLandmarkMode.NO_LANDMARKS - | VisionFaceDetectorLandmarkMode.ALL_LANDMARKS; - - /** - * Sets the smallest desired face size, expressed as a proportion of the width of the head to the image width. For - * example, if a value of 0.1 is specified then the smallest face to search for is roughly 10% of the width of the - * image being searched. - * - * Setting the min face size is a performance vs. accuracy trade-off: setting the face size smaller will enable the - * detector to find smaller faces but detection will take longer; setting the face size larger will exclude smaller - * faces but will run faster. - * - * This is not a hard limit on face size; the detector may find faces slightly smaller than specified. - * - * Defaults to 0.1. - * - * #### Example - * - * ```js - * const faces = await firebase.vision().faceDetectorProcessImage(filePath, { - * minFaceSize: 0.5, - * }); - * ``` - */ - minFaceSize?: number; - - /** - * Extended option for controlling additional accuracy / speed trade-offs in performing face detection. In general, - * choosing the more accurate mode will generally result in longer runtime, whereas choosing the faster mode will - * generally result in detecting fewer faces. - * - * Defaults to `VisionFaceDetectorPerformanceMode.FAST`. - * - * #### Example - * - * ```js - * const faces = await firebase.vision().faceDetectorProcessImage(filePath, { - * performanceMode: VisionFaceDetectorPerformanceMode.ACCURATE, - * }); - * ``` - */ - performanceMode?: - | VisionFaceDetectorPerformanceMode.FAST - | VisionFaceDetectorPerformanceMode.ACCURATE; - } - - /** - * Options for on device image labeler. Confidence threshold could be provided for the label detection. - * - - */ - export interface VisionImageLabelerOptions { - /** - * Sets confidence threshold of detected labels. Only labels detected with confidence higher than this threshold are returned. - * - * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned. - * - * Defaults to 0.5. - * - * #### Example - * - * ```js - * const labels = await firebase.vision().imageLabelerProcessImage(filePath, { - * confidenceThreshold: 0.8, - * }); - * ``` - */ - confidenceThreshold?: number; - } - - /** - * Options for cloud image labeler. Confidence threshold could be provided for the label detection. - * - * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned. The default threshold is 0.5. - * - * Note: at most 20 labels will be returned for cloud image labeler. - */ - export interface VisionCloudImageLabelerOptions { - /** - * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API. - * - * > Do not set this for debug build if you use simulators to test. - * - * #### Example - * - * ```js - * await firebase.vision().cloudImageLabelerProcessImage(filePath, { - * enforceCertFingerprintMatch: true, - * }); - * ``` - */ - enforceCertFingerprintMatch?: boolean; - - /** - * Sets confidence threshold in the range of [0.0 - 1.0] of detected labels. Only labels detected with confidence higher than this threshold are returned. - * - * Defaults to 0.5. 
- * - * #### Example - * - * ```js - * await firebase.vision().cloudImageLabelerProcessImage(filePath, { - * confidenceThreshold: 0.8, - * }); - * ``` - */ - confidenceThreshold?: number; - - /** - * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used. - * - * #### Example - * - * ```js - * await firebase.vision().cloudImageLabelerProcessImage(filePath, { - * apiKeyOverride: 'xyz123', - * }); - * ``` - * - * @ios - */ - apiKeyOverride?: string; - } - - /** - * Detector for finding popular natural and man-made structures within an image. - */ - export interface VisionCloudLandmarkRecognizerOptions { - /** - * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API. - * - * > Do not set this for debug build if you use simulators to test. - */ - enforceCertFingerprintMatch?: boolean; - - /** - * Sets the maximum number of results of this type. - * - * Defaults to 10. - */ - maxResults?: number; - - /** - * Sets model type for the detection. - * - * Defaults to `VisionCloudLandmarkRecognizerModelType.STABLE_MODEL`. - */ - modelType?: - | VisionCloudLandmarkRecognizerModelType.STABLE_MODEL - | VisionCloudLandmarkRecognizerModelType.LATEST_MODEL; - - /** - * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used. - * - * @ios - */ - apiKeyOverride?: string; - } - - /** - * Model types for cloud landmark recognition. - */ - export enum VisionCloudLandmarkRecognizerModelType { - /** - * Stable model would be used. - */ - STABLE_MODEL = 1, - - /** - * Latest model would be used. - */ - LATEST_MODEL = 2, - } - - /** - * Options for cloud text recognizer. - */ - export interface VisionCloudTextRecognizerOptions { - /** - * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API. - * - * > Do not set this for debug build if you use simulators to test. - * - * #### Example - * - * ```js - * await firebase.vision().cloudTextRecognizerProcessImage(filePath, { - * enforceCertFingerprintMatch: true, - * }); - * ``` - */ - enforceCertFingerprintMatch?: boolean; - - /** - * Sets model type for cloud text recognition. The two models SPARSE_MODEL and DENSE_MODEL handle different text densities in an image. - * - * See `VisionCloudTextRecognizerModelType` for types. - * - * Defaults to `VisionCloudTextRecognizerModelType.SPARSE_MODEL`. - * - * #### Example - * - * ```js - * import { - * firebase, - * VisionCloudTextRecognizerModelType, - * } from '@react-native-firebase/ml-vision'; - * - * await firebase.vision().cloudTextRecognizerProcessImage(filePath, { - * modelType: VisionCloudTextRecognizerModelType.DENSE_MODEL, - * }); - * ``` - */ - modelType?: - | VisionCloudTextRecognizerModelType.SPARSE_MODEL - | VisionCloudTextRecognizerModelType.DENSE_MODEL; - - /** - * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language - * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when - * the language of the text in the image is known, setting a hint will help get better results (although it will be a - * significant hindrance if the hint is wrong). - * - * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information. 
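`VisionCloudLandmarkRecognizerOptions`, documented above, carries no inline example of its own, so a sketch may help. It assumes the device is online and that the Cloud Vision API is enabled for the Firebase project; the result fields (`landmark`, `confidence`, `locations`) follow the `VisionLandmark` interface defined later in this file:

```js
import vision, {
  VisionCloudLandmarkRecognizerModelType,
} from '@react-native-firebase/ml-vision';

// Cloud landmark recognition: returns named landmarks with [lat, lng] pairs.
async function whereWasThisTaken(filePath) {
  const landmarks = await vision().cloudLandmarkRecognizerProcessImage(filePath, {
    maxResults: 3,
    modelType: VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
  });

  return landmarks.map(({ landmark, confidence, locations }) => ({
    name: landmark,
    confidence,
    coordinates: locations[0], // a VisionGeoPoint: [lat, lng]
  }));
}
```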
- * - * #### Example - * - * ```js - * await firebase.vision().cloudTextRecognizerProcessImage(filePath, { - * languageHints: ['fr', 'de'], - * }); - * ``` - */ - languageHints?: string[]; - - /** - * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used. - * - * #### Example - * - * ```js - * await firebase.vision().cloudTextRecognizerProcessImage(filePath, { - * apiKeyOverride: 'xyz123', - * }); - * ``` - * - * @ios - */ - apiKeyOverride?: string; - } - - /** - * Options for the cloud document text recognizer. - */ - export interface VisionCloudDocumentTextRecognizerOptions { - /** - * Only allow registered application instances with matching certificate fingerprint to use Cloud Vision API. - * - * > Do not set this for debug build if you use simulators to test. - * - * #### Example - * - * ```js - * await firebase.vision().cloudDocumentTextRecognizerProcessImage(filePath, { - * enforceCertFingerprintMatch: true, - * }); - * ``` - */ - enforceCertFingerprintMatch?: boolean; - - /** - * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language - * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when - * the language of the text in the image is known, setting a hint will help get better results (although it will be a - * significant hindrance if the hint is wrong). - * - * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information. - * - * #### Example - * - * ```js - * await firebase.vision().cloudDocumentTextRecognizerProcessImage(filePath, { - * languageHints: ['fr', 'de'], - * }); - * ``` - */ - languageHints?: string[]; - - /** - * API key to use for Cloud Vision API. If not set, the default API key from `firebase.app()` will be used. - * - * #### Example - * - * ```js - * await firebase.vision().cloudDocumentTextRecognizerProcessImage(filePath, { - * apiKeyOverride: 'xyz123', - * }); - * ``` - * - * @ios - */ - apiKeyOverride?: string; - } - - /** - * The cloud model type used in VisionCloudTextRecognizerOptions & VisionCloudDocumentTextRecognizerOptions. - * - * Defaults to `SPARSE_MODEL`. - */ - export enum VisionCloudTextRecognizerModelType { - /** - * Sparse model type. It is more suitable for sparse text. - */ - SPARSE_MODEL = 1, - /** - * Dense model type. It is more suitable for well-formatted dense text. - */ - DENSE_MODEL = 2, - } - - /** - * Indicates whether to run additional classifiers for characterizing attributes such as "smiling" and "eyes open". - */ - export enum VisionFaceDetectorClassificationMode { - /** - * Disables collection of classifier information. - */ - NO_CLASSIFICATIONS = 1, - - /** - * Enables collection of classifier information. - */ - ALL_CLASSIFICATIONS = 2, - } - - /** - * Sets whether to detect contours or not. Processing time increases as the number of contours to search for increases, - * so detecting all contours will increase the overall detection time. - */ - export enum VisionFaceDetectorContourMode { - /** - * Disables collection of contour information. - */ - NO_CONTOURS = 1, - - /** - * Enables collection of contour information. - */ - ALL_CONTOURS = 2, - } - - /** - * Sets whether to detect no landmarks or all landmarks. Processing time increases as the number of landmarks to - * search for increases, so detecting all landmarks will increase the overall detection time.
Detecting - * landmarks can improve pose estimation. - */ - export enum VisionFaceDetectorLandmarkMode { - /** - * Disables collection of landmark information. - */ - NO_LANDMARKS = 1, - - /** - * Enables collection of landmark information. - */ - ALL_LANDMARKS = 2, - } - - /** - * Extended option for controlling additional accuracy / speed trade-offs in performing face detection. In general, - * choosing the more accurate mode will generally result in longer runtime, whereas choosing the faster - * mode will generally result in detecting fewer faces. - */ - export enum VisionFaceDetectorPerformanceMode { - /** - * Indicates a preference for speed in extended settings that may make an accuracy vs. speed trade-off. This will - * tend to detect fewer faces and may be less precise in determining values such as position, but will run faster. - */ - FAST = 1, - - /** - * Indicates a preference for accuracy in extended settings that may make an accuracy vs. speed trade-off. - * This will tend to detect more faces and may be more precise in determining values such as position, at the cost - * of speed. - */ - ACCURATE = 2, - } - - /** - * A Rectangle holds four number coordinates relative to the processed image. - * Rectangles are represented as `[left, top, right, bottom]`. - * - * Used by Vision Text Recognizer, Face Detector & Landmark Recognition APIs. - */ - export type VisionRectangle = [number, number, number, number]; - - /** - * A point holds two number coordinates relative to the processed image. - * Points are represented as `[x, y]`. - * - * Used by Vision Text Recognizer, Face Detector & Landmark Recognition APIs. - */ - export type VisionPoint = [number, number]; - - /** - * A hierarchical representation of text recognized in an image. - */ - export interface VisionText { - /** - * Retrieve the recognized text as a string. - */ - text: string; - - /** - * Gets an array of `VisionTextBlock`s, each a block of text that can be further decomposed into an array of `VisionTextLine`s. - */ - blocks: VisionTextBlock[]; - } - - /** - * Represents a block of text. - */ - export interface VisionDocumentTextBlock extends VisionDocumentTextBase { - /** - * Gets an Array of `VisionDocumentTextParagraph`s that make up this block. - */ - paragraphs: VisionDocumentTextParagraph[]; - } - - /** - * A structural unit of text representing a number of words in a certain order. - */ - export interface VisionDocumentTextParagraph extends VisionDocumentTextBase { - /** - * Gets an Array of `VisionDocumentTextWord`s that make up this paragraph. - * - * Returns an empty list if no Word is found. - */ - words: VisionDocumentTextWord[]; - } - - /** - * A single word representation. - */ - export interface VisionDocumentTextWord extends VisionDocumentTextBase { - /** - * Gets an Array of `VisionDocumentTextSymbol`s that make up this word. - * The order of the symbols follows the natural reading order. - */ - symbols: VisionDocumentTextSymbol[]; - } - - /** - * A single symbol representation. - */ - export type VisionDocumentTextSymbol = VisionDocumentTextBase; - - /** - * Enum representing the detected break type. - */ - export enum VisionDocumentTextRecognizedBreakType { - /** - * Line-wrapping break. - */ - EOL_SURE_SPACE = 3, - - /** - * End-line hyphen that is not present in text; does not co-occur with `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. - */ - HYPHEN = 4, - - /** - * Line break that ends a paragraph. - */ - LINE_BREAK = 5, - - /** - * Regular space. - */ - SPACE = 1, - - /** - * Sure space (very wide).
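Because `VisionRectangle` and `VisionPoint` are plain tuples rather than classes, consumers destructure them directly, and width or height fall out as simple arithmetic. A sketch of converting a detector `boundingBox` into a view-space rectangle; `displayScale`, an image-to-view ratio, is an assumption for illustration:

```js
// [left, top, right, bottom] -> { x, y, width, height } for an overlay view.
function boundingBoxToViewRect(boundingBox, displayScale) {
  const [left, top, right, bottom] = boundingBox;
  return {
    x: left * displayScale,
    y: top * displayScale,
    width: (right - left) * displayScale,
    height: (bottom - top) * displayScale,
  };
}
```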
- */ - SURE_SPACE = 2, - - /** - * Unknown break label type. - */ - UNKNOWN = 0, - } - - /** - * A recognized break is the detected start or end of a structural component. - */ - export interface VisionDocumentTextRecognizedBreak { - /** - * Gets detected break type. - */ - breakType: VisionDocumentTextRecognizedBreakType; - - /** - * Returns true if break prepends an element. - */ - isPrefix: boolean; - } - /** - * A shared type that all VisionDocumentText components inherit from. - */ - export interface VisionDocumentTextBase { - /** - * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `VisionTextBlock`, and left-to-right within a `VisionTextLine`. - */ - text: string; - - /** - * The confidence of the recognized text. It only returns a valid result from cloud recognizers. For on-device text recognition, the confidence is always null. - */ - confidence: null | number; - - /** - * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array) - * - * A language is the BCP-47 language code, such as "en-US" or "sr-Latn". - */ - recognizedLanguages: string[]; - - /** - * Returns the bounding rectangle of the detected text. - */ - boundingBox: VisionRectangle; - - /** - * Gets the recognized break - the detected start or end of a structural component. - */ - recognizedBreak: VisionDocumentTextRecognizedBreak; - } - - /** - * A hierarchical representation of document text recognized in an image. - */ - export interface VisionDocumentText { - /** - * Retrieve the recognized text as a string. - */ - text: string; - - /** - * Gets an array of `VisionDocumentTextBlock`s, each a block of text that can be further decomposed into an array of `VisionDocumentTextParagraph`s. - */ - blocks: VisionDocumentTextBlock[]; - } - - /** - * A shared type that all Vision Text components inherit from. - */ - export interface VisionTextBase { - /** - * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `VisionTextBlock`, and left-to-right within a `VisionTextLine`. - */ - text: string; - - /** - * The confidence of the recognized text. It only returns a valid result from cloud recognizers. For on-device text recognition, the confidence is always null. - */ - confidence: null | number; - - /** - * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array) - * - * A language is the BCP-47 language code, such as "en-US" or "sr-Latn". - */ - recognizedLanguages: string[]; - - /** - * Returns the bounding rectangle of the detected text. - */ - boundingBox: VisionRectangle; - - /** - * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image. - */ - cornerPoints: VisionPoint[]; - } - - /** - * Represents a block of text (similar to a paragraph). - */ - export interface VisionTextBlock extends VisionTextBase { - /** - * Gets an Array of `VisionTextLine`s that make up this text block. - */ - lines: VisionTextLine[]; - } - - /** - * Represents a line of text. - */ - export interface VisionTextLine extends VisionTextBase { - /** - * Gets an Array of `VisionTextElement`s that make up this line. - * - * An element is roughly equivalent to a space-separated "word" in most Latin languages, or a character in others.
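The text types above form a strict hierarchy: `VisionText` holds blocks, blocks hold lines, and lines hold elements, with the `VisionTextBase` fields available at every level. A sketch of flattening that hierarchy into per-line results; `filePath` is an assumed local image path:

```js
import vision from '@react-native-firebase/ml-vision';

// Walk text -> blocks -> lines; confidence is null for the on-device model,
// so it is surfaced as-is rather than filtered on.
async function listRecognizedLines(filePath) {
  const { blocks } = await vision().textRecognizerProcessImage(filePath);
  return blocks.flatMap(block =>
    block.lines.map(line => ({
      text: line.text,
      boundingBox: line.boundingBox,
      confidence: line.confidence,
    })),
  );
}
```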
For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element. - */ - elements: VisionTextElement[]; - } - - /** - * Roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element. - */ - export type VisionTextElement = VisionTextBase; - - /** - * Represents an image label returned from `imageLabelerProcessImage()` and `cloudImageLabelerProcessImage()`. - */ - export interface VisionImageLabel { - /** - * Returns a detected label from the given image. The label returned here is in English only. - * - * Use `entityId` to retrieve a unique id. - */ - text: string; - - /** - * Returns an opaque entity ID. IDs are available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/). - */ - entityId: string; - - /** - * Gets overall confidence of the result. - * - * Range between 0 (low confidence) and 1 (high confidence). - */ - confidence: number; - } - - /** - * Represents a face returned from `faceDetectorProcessImage()`. - */ - export interface VisionFace { - /** - * Returns the axis-aligned bounding rectangle of the detected face. - */ - boundingBox: VisionRectangle; - - /** - * Represents the face contours. A contour is a list of points on a detected face, such as the mouth. - * - * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE` - * landmark is the subject's left eye, not the eye that is on the left when viewing the image. - */ - faceContours: VisionFaceContour[]; - - /** - * Returns the rotation of the face about the vertical axis of the image. Positive euler y is when the face turns - * toward the right side of the image that is being processed. - */ - headEulerAngleY: number; - - /** - * Returns the rotation of the face about the axis pointing out of the image. Positive euler z is a - * counter-clockwise rotation within the image plane. - */ - headEulerAngleZ: number; - - /** - * Returns an array of `VisionFaceLandmark`. - * - * Returns an empty array if the landmark mode has not been enabled via the `landmarkMode` option. - */ - landmarks: VisionFaceLandmark[]; - - /** - * Returns a value between 0.0 and 1.0 giving a probability that the face's left eye is open. - * - * Returns -1 if the classification mode has not been enabled via the `classificationMode` option. - */ - leftEyeOpenProbability: number; - - /** - * Returns a value between 0.0 and 1.0 giving a probability that the face's right eye is open. - * - * Returns -1 if the classification mode has not been enabled via the `classificationMode` option. - */ - rightEyeOpenProbability: number; - - /** - * Returns a value between 0.0 and 1.0 giving a probability that the face is smiling. - * - * Returns -1 if the classification mode has not been enabled via the `classificationMode` option. - */ - smilingProbability: number; - } - - /** - * Represents a face landmark. A landmark is a point on a detected face, such as an eye, nose, or mouth. - * - * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE` landmark - * is the subject's left eye, not the eye that is on the left when viewing the image. - */ - export interface VisionFaceLandmark { - /** - * Returns the landmark type. - */ - type: VisionFaceLandmarkType; - - /** - * Gets a 2D point for landmark position, where (0, 0) is the upper-left corner of the image.
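The probability fields on `VisionFace` return -1 until classification is switched on, so the option and the field reads belong together. A sketch of filtering for clearly smiling faces; the 0.8 threshold is an arbitrary choice for illustration:

```js
import vision, {
  VisionFaceDetectorClassificationMode,
} from '@react-native-firebase/ml-vision';

// Without ALL_CLASSIFICATIONS, smilingProbability would be -1 for every face.
async function findSmilingFaces(filePath) {
  const faces = await vision().faceDetectorProcessImage(filePath, {
    classificationMode: VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS,
  });
  return faces.filter(face => face.smilingProbability >= 0.8);
}
```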
- */ - position: VisionPoint[]; - } - - /** - * Landmark types for a face. - */ - export enum VisionFaceLandmarkType { - /** - * The midpoint between the subject's left mouth corner and the outer corner of the subject's left eye. - */ - LEFT_CHEEK = 1, - - /** - * The midpoint of the subject's left ear tip and left ear lobe. - */ - LEFT_EAR = 3, - - /** - * The center of the subject's left eye cavity. - */ - LEFT_EYE = 4, - - /** - * The center of the subject's bottom lip. - */ - MOUTH_BOTTOM = 0, - - /** - * The subject's left mouth corner where the lips meet. - */ - MOUTH_LEFT = 5, - - /** - * The subject's right mouth corner where the lips meet. - */ - MOUTH_RIGHT = 11, - - /** - * The midpoint between the subject's nostrils where the nose meets the face. - */ - NOSE_BASE = 6, - - /** - * The midpoint between the subject's right mouth corner and the outer corner of the subject's right eye. - */ - RIGHT_CHEEK = 7, - - /** - * The midpoint of the subject's right ear tip and right ear lobe. - */ - RIGHT_EAR = 9, - - /** - * The center of the subject's right eye cavity. - */ - RIGHT_EYE = 10, - } - - /** - * Represent a face contour. A contour is a list of points on a detected face, such as the mouth. - * When 'left' and 'right' are used, they are relative to the subject in the image. For example, the `LEFT_EYE` landmark - * is the subject's left eye, not the eye that is on the left when viewing the image. - */ - export interface VisionFaceContour { - /** - * Returns the contour type. - */ - type: VisionFaceContourType; - - /** - * Gets a list of 2D points for this face contour, where (0, 0) is the upper-left corner of the image. The point is - * guaranteed to be within the bounds of the image. - */ - points: VisionPoint[]; - } - - /** - * Countour type for a face. - */ - export enum VisionFaceContourType { - /** - * All points of a face contour. - */ - ALL_POINTS = 1, - - /** - * The outline of the subject's face. - */ - FACE = 2, - - /** - * The outline of the subject's left eye cavity. - */ - LEFT_EYE = 7, - - /** - * The bottom outline of the subject's left eyebrow. - */ - LEFT_EYEBROW_BOTTOM = 4, - - /** - * The top outline of the subject's left eyebrow. - */ - LEFT_EYEBROW_TOP = 3, - - /** - * The bottom outline of the subject's lower lip. - */ - LOWER_LIP_BOTTOM = 12, - - /** - * The top outline of the subject's lower lip. - */ - LOWER_LIP_TOP = 11, - - /** - * The outline of the subject's nose bridge. - */ - NOSE_BOTTOM = 14, - - /** - * The outline of the subject's nose bridge. - */ - NOSE_BRIDGE = 13, - - /** - * The outline of the subject's right eye cavity. - */ - RIGHT_EYE = 8, - - /** - * The bottom outline of the subject's right eyebrow. - */ - RIGHT_EYEBROW_BOTTOM = 6, - - /** - * The top outline of the subject's right eyebrow. - */ - RIGHT_EYEBROW_TOP = 5, - - /** - * The bottom outline of the subject's upper lip. - */ - UPPER_LIP_BOTTOM = 10, - - /** - * The top outline of the subject's upper lip. - */ - UPPER_LIP_TOP = 9, - } - - /** - * Represents a detected landmark returned from `cloudLandmarkRecognizerProcessImage()`. - */ - export interface VisionLandmark { - /** - * Gets image region of the detected landmark. Returns null if nothing was detected - */ - boundingBox: VisionRectangle | null; - - /** - * Gets overall confidence of the result. Ranging between 0 & 1. - */ - confidence: number; - - /** - * Gets opaque entity ID. Some IDs may be available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/). 
- */ - entityId: string; - - /** - * Gets the detected landmark. - */ - landmark: string; - - /** - * Gets the location information for the detected entity. - * - * Multiple VisionGeoPoint elements can be present because one location may indicate the location of the scene - * in the image, and another location may indicate the location of the place where the image was taken. - * Location information is usually present for landmarks. - */ - locations: VisionGeoPoint[]; - } - - /** - * A representation of a latitude/longitude pair. - * - * This is expressed as an array of numbers representing degrees latitude and degrees longitude, in the form `[lat, lng]`. - */ - export type VisionGeoPoint = [number, number]; - - /** - * The Firebase ML Kit service interface. - * - * > This module is available for the default app only. - * - * #### Example - * - * Get the ML Kit service for the default app: - * - * ```js - * const defaultAppMLKit = firebase.vision(); - * ``` - */ - export class Module extends FirebaseModule { - /** - * Detects faces from a local image file. - * - * @param imageFilePath A local path to an image on the device. - * @param faceDetectorOptions An optional instance of `VisionFaceDetectorOptions`. - */ - faceDetectorProcessImage( - imageFilePath: string, - faceDetectorOptions?: VisionFaceDetectorOptions, - ): Promise<VisionFace[]>; - - /** - * Detect text from a local image file using the on-device model. - * - * @param imageFilePath A local path to an image on the device. - */ - textRecognizerProcessImage(imageFilePath: string): Promise<VisionText>; - - /** - * Detect text from a local image file using the cloud (Firebase) model. - * - * @param imageFilePath A local path to an image on the device. - * @param cloudTextRecognizerOptions An instance of `VisionCloudTextRecognizerOptions`. - */ - cloudTextRecognizerProcessImage( - imageFilePath: string, - cloudTextRecognizerOptions?: VisionCloudTextRecognizerOptions, - ): Promise<VisionText>; - - /** - * Detect text within a document from a local image file using the cloud (Firebase) model. - * - * @param imageFilePath A local path to an image on the device. - * @param cloudDocumentTextRecognizerOptions An instance of `VisionCloudDocumentTextRecognizerOptions`. - */ - cloudDocumentTextRecognizerProcessImage( - imageFilePath: string, - cloudDocumentTextRecognizerOptions?: VisionCloudDocumentTextRecognizerOptions, - ): Promise<VisionDocumentText>; - - /** - * Returns an array of landmarks (as `VisionLandmark`) of a given local image file path. Landmark detection - * is done in the cloud (Firebase). - * - * @param imageFilePath A local image file path. - * @param cloudLandmarkRecognizerOptions An optional instance of `VisionCloudLandmarkRecognizerOptions`. - */ - cloudLandmarkRecognizerProcessImage( - imageFilePath: string, - cloudLandmarkRecognizerOptions?: VisionCloudLandmarkRecognizerOptions, - ): Promise<VisionLandmark[]>; - - /** - * Returns an array of labels (as `VisionImageLabel`) of a given local image file path. Label detection is done - * on device, which is faster but less descriptive. - * - * #### Example - * - * ```js - * const labels = await firebase.vision().imageLabelerProcessImage(filePath, { - * confidenceThreshold: 0.8, - * }); - * ``` - * - * @param imageFilePath A local image file path. - * @param imageLabelerOptions An optional instance of `VisionImageLabelerOptions`.
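Of the recognizers declared above, only the labelers carry inline examples, so a sketch of the document variant may help: it resolves to `VisionDocumentText`, whose blocks decompose into paragraphs rather than lines. The `languageHints` value here is an assumption for illustration:

```js
import vision from '@react-native-firebase/ml-vision';

// Dense/document OCR via the cloud model; returns VisionDocumentText.
async function readScannedPage(filePath) {
  const doc = await vision().cloudDocumentTextRecognizerProcessImage(filePath, {
    languageHints: ['en'],
  });
  // Each block is a list of paragraphs; join paragraph text per block.
  return doc.blocks.map(block => block.paragraphs.map(p => p.text).join('\n'));
}
```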
- */ - imageLabelerProcessImage( - imageFilePath: string, - imageLabelerOptions?: VisionImageLabelerOptions, - ): Promise<VisionImageLabel[]>; - - /** - * Returns an array of labels (as `VisionImageLabel`) of a given local image file path. Label detection is done - * in the cloud (Firebase), which is slower but more descriptive. - * - * #### Example - * - * ```js - * const labels = await firebase.vision().cloudImageLabelerProcessImage(filePath, { - * confidenceThreshold: 0.8, - * }); - * ``` - * - * @param imageFilePath A local image file path. - * @param cloudImageLabelerOptions An optional instance of `VisionCloudImageLabelerOptions`. - */ - cloudImageLabelerProcessImage( - imageFilePath: string, - cloudImageLabelerOptions?: VisionCloudImageLabelerOptions, - ): Promise<VisionImageLabel[]>; - - /** - * Returns an array of barcodes (as `VisionBarcode`) detected for a local image file path. - * - * Barcode detection is done locally on device. - * - * #### Example 1 - * - * ```js - * import vision, { VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo); - * } - * ``` - * - * #### Example 2 - * - * Process image with custom `VisionBarcodeDetectorOptions`. - * - * ```js - * import vision, { VisionBarcodeFormat, VisionBarcodeValueType } from '@react-native-firebase/ml-vision'; - * - * const [barcode, ...otherBarcodes] = await vision().barcodeDetectorProcessImage(filePath, { - * barcodeFormats: [VisionBarcodeFormat.QR_CODE] - * }); - * - * if (barcode && barcode.valueType === VisionBarcodeValueType.CONTACT_INFO) { - * console.log(barcode.contactInfo); - * } - * ``` - * - * @param imageFilePath A local image file path. - * @param barcodeDetectorOptions Optional instance of `VisionBarcodeDetectorOptions`.
- */ - barcodeDetectorProcessImage( - imageFilePath: string, - barcodeDetectorOptions?: MLKitVision.VisionBarcodeDetectorOptions, - ): Promise<MLKitVision.VisionBarcode[]>; - } -} - -declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp< - FirebaseVisionTypes.Module, - FirebaseVisionTypes.Statics ->; - -export const firebase: ReactNativeFirebase.Module & { - vision: typeof defaultExport; - app(name?: string): ReactNativeFirebase.FirebaseApp & { vision(): FirebaseVisionTypes.Module }; -}; - -export const VisionBarcodeFormat: FirebaseVisionTypes.Statics['VisionBarcodeFormat']; -export const VisionFaceContourType: FirebaseVisionTypes.Statics['VisionFaceContourType']; -export const VisionFaceLandmarkType: FirebaseVisionTypes.Statics['VisionFaceLandmarkType']; -export const VisionBarcodeValueType: FirebaseVisionTypes.Statics['VisionBarcodeValueType']; -export const VisionBarcodeEmailType: FirebaseVisionTypes.Statics['VisionBarcodeEmailType']; -export const VisionBarcodePhoneType: FirebaseVisionTypes.Statics['VisionBarcodePhoneType']; -export const VisionBarcodeAddressType: FirebaseVisionTypes.Statics['VisionBarcodeAddressType']; -export const VisionFaceDetectorContourMode: FirebaseVisionTypes.Statics['VisionFaceDetectorContourMode']; -export const VisionFaceDetectorLandmarkMode: FirebaseVisionTypes.Statics['VisionFaceDetectorLandmarkMode']; -export const VisionBarcodeWifiEncryptionType: FirebaseVisionTypes.Statics['VisionBarcodeWifiEncryptionType']; -export const VisionFaceDetectorPerformanceMode: FirebaseVisionTypes.Statics['VisionFaceDetectorPerformanceMode']; -export const VisionCloudTextRecognizerModelType: FirebaseVisionTypes.Statics['VisionCloudTextRecognizerModelType']; -export const VisionFaceDetectorClassificationMode: FirebaseVisionTypes.Statics['VisionFaceDetectorClassificationMode']; -export const VisionDocumentTextRecognizedBreakType: FirebaseVisionTypes.Statics['VisionDocumentTextRecognizedBreakType']; -export const VisionCloudLandmarkRecognizerModelType: FirebaseVisionTypes.Statics['VisionCloudLandmarkRecognizerModelType']; - -export default defaultExport; - -/** - * Attach namespace to `firebase.` and `FirebaseApp.`. - */ -declare module '@react-native-firebase/app' { - namespace ReactNativeFirebase { - import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp; - interface Module { - vision: FirebaseModuleWithStaticsAndApp< - FirebaseVisionTypes.Module, - FirebaseVisionTypes.Statics - >; - } - - interface FirebaseApp { - vision(): FirebaseVisionTypes.Module; - } - - interface FirebaseJsonConfig { - ml_vision_face_model: boolean; - ml_vision_ocr_model: boolean; - ml_vision_barcode_model: boolean; - ml_vision_label_model: boolean; - ml_vision_image_label_model: boolean; - } - } -} diff --git a/packages/ml-vision/lib/index.js b/packages/ml-vision/lib/index.js deleted file mode 100644 index 325db1ebb3..0000000000 --- a/packages/ml-vision/lib/index.js +++ /dev/null @@ -1,273 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { - isString, - toFilePath, - validateOptionalNativeDependencyExists, -} from '@react-native-firebase/app/lib/common'; -import { - createModuleNamespace, - FirebaseModule, - getFirebaseRoot, -} from '@react-native-firebase/app/lib/internal'; -import version from './version'; -import VisionBarcodeAddressType from './VisionBarcodeAddressType'; -import visionBarcodeDetectorOptions from './visionBarcodeDetectorOptions'; -import VisionBarcodeEmailType from './VisionBarcodeEmailType'; -import VisionBarcodeFormat from './VisionBarcodeFormat'; -import VisionBarcodePhoneType from './VisionBarcodePhoneType'; -import VisionBarcodeValueType from './VisionBarcodeValueType'; -import VisionBarcodeWifiEncryptionType from './VisionBarcodeWifiEncryptionType'; -import visionCloudDocumentTextRecognizerOptions from './visionCloudDocumentTextRecognizerOptions'; -import visionCloudImageLabelerOptions from './visionCloudImageLabelerOptions'; -import VisionCloudLandmarkRecognizerModelType from './VisionCloudLandmarkRecognizerModelType'; -import visionCloudLandmarkRecognizerOptions from './visionCloudLandmarkRecognizerOptions'; -import VisionCloudTextRecognizerModelType from './VisionCloudTextRecognizerModelType'; -import visionCloudTextRecognizerOptions from './visionCloudTextRecognizerOptions'; -import VisionDocumentTextRecognizedBreakType from './VisionDocumentTextRecognizedBreakType'; -import VisionFaceContourType from './VisionFaceContourType'; -import VisionFaceDetectorClassificationMode from './VisionFaceDetectorClassificationMode'; -import VisionFaceDetectorContourMode from './VisionFaceDetectorContourMode'; -import VisionFaceDetectorLandmarkMode from './VisionFaceDetectorLandmarkMode'; -import visionFaceDetectorOptions from './visionFaceDetectorOptions'; -import VisionFaceDetectorPerformanceMode from './VisionFaceDetectorPerformanceMode'; -import VisionFaceLandmarkType from './VisionFaceLandmarkType'; -import visionImageLabelerOptions from './visionImageLabelerOptions'; - -const statics = { - VisionCloudTextRecognizerModelType, - VisionFaceDetectorClassificationMode, - VisionFaceDetectorContourMode, - VisionFaceDetectorLandmarkMode, - VisionFaceDetectorPerformanceMode, - VisionFaceLandmarkType, - VisionFaceContourType, - VisionCloudLandmarkRecognizerModelType, - VisionDocumentTextRecognizedBreakType, - VisionBarcodeFormat, - VisionBarcodeValueType, - VisionBarcodeAddressType, - VisionBarcodeEmailType, - VisionBarcodePhoneType, - VisionBarcodeWifiEncryptionType, -}; - -const namespace = 'vision'; -const nativeModuleName = [ - 'RNFBMLVisionFaceDetectorModule', - 'RNFBMLVisionImageLabelerModule', - 'RNFBMLVisionTextRecognizerModule', - 'RNFBMLVisionBarcodeDetectorModule', - 'RNFBMLVisionLandmarkRecognizerModule', - 'RNFBMLVisionDocumentTextRecognizerModule', -]; - -class FirebaseMlKitVisionModule extends FirebaseModule { - faceDetectorProcessImage(localImageFilePath, faceDetectorOptions) { - validateOptionalNativeDependencyExists( - 'ml_vision_face_model', - 'ML Kit Vision Face Detector', - !!this.native.faceDetectorProcessImage, - ); - - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().faceDetectorProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionFaceDetectorOptions(faceDetectorOptions); - } catch (e) { - throw new Error( - `firebase.vision().faceDetectorProcessImage(_, *) 
'faceDetectorOptions' ${e.message}.`, - ); - } - - return this.native.faceDetectorProcessImage(toFilePath(localImageFilePath), options); - } - - textRecognizerProcessImage(localImageFilePath) { - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().textRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - return this.native.textRecognizerProcessImage(toFilePath(localImageFilePath)); - } - - cloudTextRecognizerProcessImage(localImageFilePath, cloudTextRecognizerOptions) { - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().cloudTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionCloudTextRecognizerOptions(cloudTextRecognizerOptions); - } catch (e) { - throw new Error(`firebase.vision().cloudTextRecognizerProcessImage(_, *) ${e.message}`); - } - - return this.native.cloudTextRecognizerProcessImage(toFilePath(localImageFilePath), options); - } - - cloudDocumentTextRecognizerProcessImage(localImageFilePath, cloudDocumentTextRecognizerOptions) { - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().cloudDocumentTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions); - } catch (e) { - throw new Error( - `firebase.vision().cloudDocumentTextRecognizerProcessImage(_, *) ${e.message}.`, - ); - } - - return this.native.cloudDocumentTextRecognizerProcessImage( - toFilePath(localImageFilePath), - options, - ); - } - - cloudLandmarkRecognizerProcessImage(localImageFilePath, cloudLandmarkRecognizerOptions) { - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().cloudLandmarkRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions); - } catch (e) { - throw new Error(`firebase.vision().cloudLandmarkRecognizerProcessImage(_, *) ${e.message}.`); - } - - return this.native.cloudLandmarkRecognizerProcessImage(toFilePath(localImageFilePath), options); - } - - imageLabelerProcessImage(localImageFilePath, imageLabelerOptions) { - validateOptionalNativeDependencyExists( - 'ml_vision_image_label_model', - 'ML Kit Vision Image Labeler', - !!this.native.imageLabelerProcessImage, - ); - - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().imageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionImageLabelerOptions(imageLabelerOptions); - } catch (e) { - throw new Error(`firebase.vision().imageLabelerProcessImage(_, *) ${e.message}.`); - } - - return this.native.imageLabelerProcessImage(toFilePath(localImageFilePath), options); - } - - cloudImageLabelerProcessImage(localImageFilePath, cloudImageLabelerOptions) { - validateOptionalNativeDependencyExists( - 'ml_vision_image_label_model', - 'ML Kit Vision Image Labeler', - !!this.native.imageLabelerProcessImage, - ); - - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().cloudImageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionCloudImageLabelerOptions(cloudImageLabelerOptions); - } catch (e) { - throw new 
Error(`firebase.vision().cloudImageLabelerProcessImage(_, *) ${e.message}.`); - } - - return this.native.cloudImageLabelerProcessImage(toFilePath(localImageFilePath), options); - } - - barcodeDetectorProcessImage(localImageFilePath, barcodeDetectorOptions) { - if (!isString(localImageFilePath)) { - throw new Error( - "firebase.vision().barcodeDetectorProcessImage(*) 'localImageFilePath' expected a string local file path.", - ); - } - - let options; - try { - options = visionBarcodeDetectorOptions(barcodeDetectorOptions); - } catch (e) { - throw new Error(`firebase.vision().barcodeDetectorProcessImage(_, *) ${e.message}`); - } - - return this.native.barcodeDetectorProcessImage(toFilePath(localImageFilePath), options); - } -} - -// import { SDK_VERSION } from '@react-native-firebase/ml-vision'; -export const SDK_VERSION = version; - -// import vision from '@react-native-firebase/ml-vision'; -// vision().X(...); -export default createModuleNamespace({ - statics, - version, - namespace, - nativeModuleName, - nativeEvents: false, - hasMultiAppSupport: true, - hasCustomUrlOrRegionSupport: false, - ModuleClass: FirebaseMlKitVisionModule, -}); - -// import vision, { firebase } from '@react-native-firebase/ml-vision'; -// vision().X(...); -// firebase.vision().X(...); -export const firebase = getFirebaseRoot(); - -// e.g. -// // import { VisionCloudTextRecognizerModelType } from '@react-native-firebase/ml-vision'; -export { default as VisionBarcodeFormat } from './VisionBarcodeFormat'; -export { default as VisionFaceContourType } from './VisionFaceContourType'; -export { default as VisionFaceLandmarkType } from './VisionFaceLandmarkType'; -export { default as VisionBarcodeValueType } from './VisionBarcodeValueType'; -export { default as VisionBarcodeEmailType } from './VisionBarcodeEmailType'; -export { default as VisionBarcodePhoneType } from './VisionBarcodePhoneType'; -export { default as VisionBarcodeAddressType } from './VisionBarcodeAddressType'; -export { default as VisionFaceDetectorContourMode } from './VisionFaceDetectorContourMode'; -export { default as VisionFaceDetectorLandmarkMode } from './VisionFaceDetectorLandmarkMode'; -export { default as VisionBarcodeWifiEncryptionType } from './VisionBarcodeWifiEncryptionType'; -export { default as VisionFaceDetectorPerformanceMode } from './VisionFaceDetectorPerformanceMode'; -export { default as VisionCloudTextRecognizerModelType } from './VisionCloudTextRecognizerModelType'; -export { default as VisionFaceDetectorClassificationMode } from './VisionFaceDetectorClassificationMode'; -export { default as VisionDocumentTextRecognizedBreakType } from './VisionDocumentTextRecognizedBreakType'; -export { default as VisionCloudLandmarkRecognizerModelType } from './VisionCloudLandmarkRecognizerModelType'; diff --git a/packages/ml-vision/lib/visionBarcodeDetectorOptions.js b/packages/ml-vision/lib/visionBarcodeDetectorOptions.js deleted file mode 100644 index 0321f5fa59..0000000000 --- a/packages/ml-vision/lib/visionBarcodeDetectorOptions.js +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { isArray, isObject, isUndefined } from '@react-native-firebase/app/lib/common'; -import VisionBarcodeFormat from './VisionBarcodeFormat'; - -export default function visionBarcodeDetectorOptions(barcodeDetectorOptions) { - const out = { - barcodeFormats: [VisionBarcodeFormat.ALL_FORMATS], - }; - - if (isUndefined(barcodeDetectorOptions)) { - return out; - } - - if (!isObject(barcodeDetectorOptions)) { - throw new Error("'barcodeDetectorOptions' expected an object value."); - } - - if (barcodeDetectorOptions.barcodeFormats) { - if (!isArray(barcodeDetectorOptions.barcodeFormats)) { - throw new Error( - "'barcodeDetectorOptions.barcodeFormats' must be an array of VisionBarcodeFormat types.", - ); - } - - const validFormats = Object.values(VisionBarcodeFormat); - - for (let i = 0; i < barcodeDetectorOptions.barcodeFormats.length; i++) { - if (!validFormats.includes(barcodeDetectorOptions.barcodeFormats[i])) { - throw new Error( - `'barcodeDetectorOptions.barcodeFormats' type at index ${i} is invalid. Expected a VisionBarcodeFormat type.`, - ); - } - } - - out.barcodeFormats = barcodeDetectorOptions.barcodeFormats; - } - - return out; -} diff --git a/packages/ml-vision/lib/visionFaceDetectorOptions.js b/packages/ml-vision/lib/visionFaceDetectorOptions.js deleted file mode 100644 index 2b5e9ebe8a..0000000000 --- a/packages/ml-vision/lib/visionFaceDetectorOptions.js +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * - */ - -import { - hasOwnProperty, - isNumber, - isObject, - isUndefined, -} from '@react-native-firebase/app/lib/common'; -import VisionFaceDetectorClassificationMode from './VisionFaceDetectorClassificationMode'; -import VisionFaceDetectorContourMode from './VisionFaceDetectorContourMode'; -import VisionFaceDetectorLandmarkMode from './VisionFaceDetectorLandmarkMode'; -import VisionFaceDetectorPerformanceMode from './VisionFaceDetectorPerformanceMode'; - -export default function visionFaceDetectorOptions(faceDetectorOptions) { - const out = { - classificationMode: VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS, - contourMode: VisionFaceDetectorContourMode.NO_CONTOURS, - landmarkMode: VisionFaceDetectorLandmarkMode.NO_LANDMARKS, - minFaceSize: 0.1, - performanceMode: VisionFaceDetectorPerformanceMode.FAST, - }; - - if (isUndefined(faceDetectorOptions)) { - return out; - } - - if (!isObject(faceDetectorOptions)) { - throw new Error("'faceDetectorOptions' expected an object value."); - } - - if (faceDetectorOptions.classificationMode) { - if ( - faceDetectorOptions.classificationMode !== - VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS && - faceDetectorOptions.classificationMode !== - VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS - ) { - throw new Error( - "'faceDetectorOptions.classificationMode' invalid classification mode. Expected VisionFaceDetectorClassificationMode.NO_CLASSIFICATIONS or VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS.", - ); - } - - out.classificationMode = faceDetectorOptions.classificationMode; - } - - if (faceDetectorOptions.contourMode) { - if ( - faceDetectorOptions.contourMode !== VisionFaceDetectorContourMode.NO_CONTOURS && - faceDetectorOptions.contourMode !== VisionFaceDetectorContourMode.ALL_CONTOURS - ) { - throw new Error( - "'faceDetectorOptions.contourMode' invalid contour mode. Expected VisionFaceDetectorContourMode.NO_CONTOURS or VisionFaceDetectorContourMode.ALL_CONTOURS.", - ); - } - - out.contourMode = faceDetectorOptions.contourMode; - } - - if (faceDetectorOptions.landmarkMode) { - if ( - faceDetectorOptions.landmarkMode !== VisionFaceDetectorLandmarkMode.NO_LANDMARKS && - faceDetectorOptions.landmarkMode !== VisionFaceDetectorLandmarkMode.ALL_LANDMARKS - ) { - throw new Error( - "'faceDetectorOptions.landmarkMode' invalid landmark mode. Expected VisionFaceDetectorLandmarkMode.NO_LANDMARKS or VisionFaceDetectorLandmarkMode.ALL_LANDMARKS.", - ); - } - - out.landmarkMode = faceDetectorOptions.landmarkMode; - } - - if (hasOwnProperty(faceDetectorOptions, 'minFaceSize')) { - if (!isNumber(faceDetectorOptions.minFaceSize)) { - throw new Error("'faceDetectorOptions.minFaceSize' expected a number value between 0 & 1."); - } - - if (faceDetectorOptions.minFaceSize < 0 || faceDetectorOptions.minFaceSize > 1) { - throw new Error("'faceDetectorOptions.minFaceSize' expected value to be between 0 & 1."); - } - - out.minFaceSize = faceDetectorOptions.minFaceSize; - } - - if (faceDetectorOptions.performanceMode) { - if ( - faceDetectorOptions.performanceMode !== VisionFaceDetectorPerformanceMode.FAST && - faceDetectorOptions.performanceMode !== VisionFaceDetectorPerformanceMode.ACCURATE - ) { - throw new Error( - "'faceDetectorOptions.performanceMode' invalid performance mode. 
Expected VisionFaceDetectorPerformanceMode.FAST or VisionFaceDetectorPerformanceMode.ACCURATE.", - ); - } - - out.performanceMode = faceDetectorOptions.performanceMode; - } - - return out; -} diff --git a/packages/ml-vision/lib/visionImageLabelerOptions.js b/packages/ml-vision/lib/visionImageLabelerOptions.js deleted file mode 100644 index 27f5104eed..0000000000 --- a/packages/ml-vision/lib/visionImageLabelerOptions.js +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2016-present Invertase Limited & Contributors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this library except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - */ - -import { - hasOwnProperty, - isNumber, - isObject, - isUndefined, -} from '@react-native-firebase/app/lib/common'; - -export default function visionImageLabelerOptions(imageLabelerOptions) { - const out = { - confidenceThreshold: 0.5, - }; - - if (isUndefined(imageLabelerOptions)) { - return out; - } - - if (!isObject(imageLabelerOptions)) { - throw new Error("'imageLabelerOptions' expected an object value."); - } - - if (hasOwnProperty(imageLabelerOptions, 'confidenceThreshold')) { - if (!isNumber(imageLabelerOptions.confidenceThreshold)) { - throw new Error( - "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1.", - ); - } - - if ( - imageLabelerOptions.confidenceThreshold < 0 || - imageLabelerOptions.confidenceThreshold > 1 - ) { - throw new Error( - "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1.", - ); - } - - out.confidenceThreshold = imageLabelerOptions.confidenceThreshold; - } - - return out; -} diff --git a/packages/ml-vision/type-test.ts b/packages/ml-vision/type-test.ts deleted file mode 100644 index c1c480303a..0000000000 --- a/packages/ml-vision/type-test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import firebase from '@react-native-firebase/app'; -import * as vision from '@react-native-firebase/ml-vision'; - -console.log(vision.default().app); - -// checks module exists at root -console.log(firebase.vision().app.name); - -// checks module exists at app level -console.log(firebase.app().vision().app.name); - -// checks statics exist -console.log(firebase.vision.SDK_VERSION); - -// checks statics exist on defaultExport -console.log(firebase.SDK_VERSION); - -// checks root exists -console.log(firebase.SDK_VERSION); - -// checks firebase named export exists on module -console.log(vision.firebase.SDK_VERSION); - -// checks multi-app support exists -console.log(firebase.vision(firebase.app()).app.name); - -// checks default export supports app arg -console.log(firebase.vision(firebase.app('foo')).app.name); - -console.log(firebase.vision.VisionBarcodeFormat.ALL_FORMATS); -console.log(vision.VisionBarcodeFormat); - -console.log(firebase.vision.VisionFaceContourType.ALL_POINTS); -console.log(vision.VisionFaceContourType.ALL_POINTS); - -console.log(firebase.vision.VisionFaceLandmarkType.LEFT_CHEEK); -console.log(vision.VisionFaceLandmarkType.LEFT_EAR); - -console.log(firebase.vision.VisionBarcodeValueType.CALENDAR_EVENT); -// 
console.log(vision.VisionBarcodeValueType.); - -console.log(firebase.vision.VisionFaceDetectorContourMode.ALL_CONTOURS); -console.log(vision.VisionFaceDetectorContourMode.ALL_CONTOURS); - -console.log(firebase.vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS); -console.log(vision.VisionFaceDetectorLandmarkMode.ALL_LANDMARKS); - -console.log(firebase.vision.VisionBarcodeWifiEncryptionType.WEP); -// console.log(vision.VisionBarcodeWifiEncryptionType.WEP); - -console.log(firebase.vision.VisionFaceDetectorPerformanceMode.ACCURATE); -console.log(vision.VisionFaceDetectorPerformanceMode.FAST); - -console.log(firebase.vision.VisionCloudTextRecognizerModelType.DENSE_MODEL); -console.log(vision.VisionCloudTextRecognizerModelType.SPARSE_MODEL); - -console.log(firebase.vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS); -console.log(vision.VisionFaceDetectorClassificationMode.ALL_CLASSIFICATIONS); - -console.log(firebase.vision.VisionDocumentTextRecognizedBreakType.EOL_SURE_SPACE); -console.log(vision.VisionDocumentTextRecognizedBreakType.HYPHEN); - -console.log(firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL); -console.log(vision.VisionCloudLandmarkRecognizerModelType.STABLE_MODEL); diff --git a/packages/ml-natural-language/.npmignore b/packages/ml/.npmignore similarity index 100% rename from packages/ml-natural-language/.npmignore rename to packages/ml/.npmignore diff --git a/packages/ml-vision/CHANGELOG.md b/packages/ml/CHANGELOG.md similarity index 100% rename from packages/ml-vision/CHANGELOG.md rename to packages/ml/CHANGELOG.md diff --git a/packages/ml-natural-language/LICENSE b/packages/ml/LICENSE similarity index 100% rename from packages/ml-natural-language/LICENSE rename to packages/ml/LICENSE diff --git a/packages/ml/README.md b/packages/ml/README.md new file mode 100644 index 0000000000..a33e17c7c5 --- /dev/null +++ b/packages/ml/README.md @@ -0,0 +1,102 @@ +
+<!-- README header: React Native Firebase - ML -->
+<!-- Badges: Coverage, NPM downloads, NPM version, License, Maintained with Lerna -->
+<!-- Community links: Chat on Discord, Follow on Twitter, Follow on Facebook -->
+
+---
+
+Firebase Machine Learning is a mobile SDK that brings Google's machine learning expertise to Android and iOS apps in a powerful yet easy-to-use package. Whether you're new or experienced in machine learning, you can implement the functionality you need in just a few lines of code. There's no need to have deep knowledge of neural networks or model optimization to get started. On the other hand, if you are an experienced ML developer, Firebase ML provides convenient APIs that help you use your custom TensorFlow Lite models in your mobile apps.
+
+## Cloud vs. on-device
+
+Firebase ML has APIs that work either in the cloud or on the device. When we describe an ML API as being a cloud API or on-device API, we are describing which machine performs inference: that is, which machine uses the ML model to discover insights about the data you provide it. In Firebase ML, this happens either on Google Cloud or on your users' mobile devices.
+
+The text recognition, image labeling, and landmark recognition APIs perform inference in the cloud. These models have more computational power and memory available to them than a comparable on-device model, and as a result can perform inference with greater accuracy and precision than an on-device model. On the other hand, every request to these APIs requires a network round-trip, which makes them unsuitable for real-time and low-latency applications such as video processing.
+
+The custom model APIs and AutoML Vision Edge deal with ML models that run on the device. The models used and produced by these features are TensorFlow Lite models, which are optimized to run on mobile devices. The biggest advantage of these models is that they don't require a network connection and can run very quickly: fast enough, for example, to process frames of video in real time.
+
+Firebase ML provides two key capabilities around on-device custom models:
+
+- Custom model deployment: Deploy custom models to your users' devices by uploading them to our servers. Your Firebase-enabled app will download the model to the device on demand. This allows you to keep your app's initial install size small, and you can swap the ML model without having to republish your app.
+
+- AutoML Vision Edge: This service helps you create your own on-device custom image classification models with an easy-to-use web interface. Then, you can seamlessly host the models you create with the service mentioned above.
+
+## ML Kit: Ready-to-use on-device models
+
+> On June 3, 2020, Google started offering ML Kit's on-device APIs through a [new standalone SDK](https://developers.google.com/ml-kit). Cloud APIs, AutoML Vision Edge, and custom model deployment will continue to be available through Firebase Machine Learning.
+
+If you're looking for pre-trained models that run on the device, check out [the new standalone ML Kit](https://developers.google.com/ml-kit).
+Use the new [react-native-mlkit modules](https://www.npmjs.com/org/react-native-mlkit) for most on-device use cases:
+
+- Text recognition
+- Image labeling
+- Object detection and tracking
+- Face detection and contour tracing
+- Barcode scanning
+- Language identification
+- Translation
+- Smart Reply
+
+---
+
+This react-native-firebase module currently supports the following Firebase ML APIs:
+
+| API                                                                              | Supported |
+| -------------------------------------------------------------------------------- | --------- |
+| [Text Recognition](https://firebase.google.com/docs/ml/recognize-text)           | ✅        |
+| [Document Text Recognition](https://firebase.google.com/docs/ml/recognize-text)  | ✅        |
+| [Image Labeling](https://firebase.google.com/docs/ml/label-images)               | ✅        |
+| [Landmark Recognition](https://firebase.google.com/docs/ml/recognize-landmarks)  | ✅        |
+| [AutoML Vision Edge](https://firebase.google.com/docs/ml/automl-image-labeling)  | ❌        |
+| [Custom Models](https://firebase.google.com/docs/ml/use-custom-models)           | ❌        |
+
+[> Learn More](https://firebase.google.com/docs/ml)
+
+## Installation
+
+Requires `@react-native-firebase/app` to be installed.
+
+```bash
+yarn add @react-native-firebase/ml
+```
+
+## Documentation
+
+- [Quick Start](https://rnfirebase.io/ml/usage)
+- [Reference](https://rnfirebase.io/reference/ml)
+
+### Additional Topics
+
+- [Text Recognition](https://rnfirebase.io/ml/text-recognition)
+- [Landmark Recognition](https://rnfirebase.io/ml/landmark-recognition)
+- [Image Labeling](https://rnfirebase.io/ml/image-labeling)
+
+## License
+
+- See [LICENSE](/LICENSE)
+
+---
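+## Example
+
+As a minimal sketch of the API surface, the cloud text recognizer can be called with a local image file path. This assumes the installation step above has been completed and a default Firebase app is configured; the `ml()` entry point mirrors the `firebase.ml()` calls in this package's tests, and the result shape (`text` plus `blocks`) follows the cloud recognizer modules in this package.
+
+```js
+import ml from '@react-native-firebase/ml';
+
+async function recognizeTextInImage(localFilePath) {
+  // Sends the local image to the cloud text recognition API and
+  // resolves with the full recognized text plus a per-block breakdown.
+  const processed = await ml().cloudTextRecognizerProcessImage(localFilePath);
+
+  console.log('Recognized text:', processed.text);
+
+  processed.blocks.forEach(block => {
+    console.log('Block text:', block.text);
+    console.log('Block bounding box:', block.boundingBox);
+  });
+}
+```
+
+The other supported APIs follow the same pattern: `cloudDocumentTextRecognizerProcessImage`, `cloudImageLabelerProcessImage`, and `cloudLandmarkRecognizerProcessImage` each take a local file path and an optional options object.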
+Built and maintained with 💛 by Invertase.
+ +--- diff --git a/packages/ml-vision/RNFBMLVision.podspec b/packages/ml/RNFBML.podspec similarity index 73% rename from packages/ml-vision/RNFBMLVision.podspec rename to packages/ml/RNFBML.podspec index 860c5f391e..693cb7715d 100644 --- a/packages/ml-vision/RNFBMLVision.podspec +++ b/packages/ml/RNFBML.podspec @@ -11,7 +11,7 @@ if coreVersionDetected != coreVersionRequired end Pod::Spec.new do |s| - s.name = "RNFBMLVision" + s.name = "RNFBML" s.version = package["version"] s.description = package["description"] s.summary = <<-DESC @@ -36,18 +36,6 @@ Pod::Spec.new do |s| # Firebase dependencies s.dependency 'Firebase/MLVision', firebase_sdk_version - if FirebaseJSON::Config.get_value_or_default('ml_vision_face_model', false) - s.dependency 'Firebase/MLVisionFaceModel', firebase_sdk_version - end - if FirebaseJSON::Config.get_value_or_default('ml_vision_ocr_model', false) - s.dependency 'Firebase/MLVisionTextModel', firebase_sdk_version - end - if FirebaseJSON::Config.get_value_or_default('ml_vision_barcode_model', false) - s.dependency 'Firebase/MLVisionBarcodeModel', firebase_sdk_version - end - if FirebaseJSON::Config.get_value_or_default('ml_vision_image_label_model', false) - s.dependency 'Firebase/MLVisionLabelModel', firebase_sdk_version - end if defined?($RNFirebaseAsStaticFramework) Pod::UI.puts "#{s.name}: Using overridden static_framework value of '#{$RNFirebaseAsStaticFramework}'" diff --git a/packages/ml-natural-language/android/.editorconfig b/packages/ml/android/.editorconfig similarity index 100% rename from packages/ml-natural-language/android/.editorconfig rename to packages/ml/android/.editorconfig diff --git a/packages/ml-vision/android/build.gradle b/packages/ml/android/build.gradle similarity index 91% rename from packages/ml-vision/android/build.gradle rename to packages/ml/android/build.gradle index 99ae046b8e..2db025b20e 100644 --- a/packages/ml-vision/android/build.gradle +++ b/packages/ml/android/build.gradle @@ -11,7 +11,7 @@ buildscript { } dependencies { - classpath("com.android.tools.build:gradle:4.0.1") + classpath("com.android.tools.build:gradle:4.1.0") } } } @@ -92,11 +92,8 @@ dependencies { implementation platform("com.google.firebase:firebase-bom:${ReactNative.ext.getVersion("firebase", "bom")}") implementation "com.google.firebase:firebase-ml-vision" - // This is necessary to fix known dependency issues in the SDK - // https://firebase.google.com/support/release-notes/android#bom_v25-8-0 implementation 'com.google.android.gms:play-services-vision:20.1.1' implementation 'com.google.android.gms:play-services-vision-common:19.1.1' - implementation 'com.google.firebase:firebase-ml-vision-image-label-model:20.0.2' implementation 'com.google.android.gms:play-services-vision-face-contour-internal:16.0.3' implementation 'com.google.android.gms:play-services-vision-image-labeling-internal:16.0.5' implementation 'com.google.android.gms:play-services-vision-image-label:18.0.5' @@ -104,8 +101,6 @@ dependencies { implementation 'com.google.firebase:firebase-ml-model-interpreter:22.0.4' } -apply from: file("./ml-models.gradle") - ReactNative.shared.applyPackageVersion() ReactNative.shared.applyDefaultExcludes() ReactNative.module.applyAndroidVersions() diff --git a/packages/ml-natural-language/android/lint.xml b/packages/ml/android/lint.xml similarity index 100% rename from packages/ml-natural-language/android/lint.xml rename to packages/ml/android/lint.xml diff --git a/packages/ml/android/settings.gradle b/packages/ml/android/settings.gradle new file mode 100644 
index 0000000000..21d0e6de75 --- /dev/null +++ b/packages/ml/android/settings.gradle @@ -0,0 +1 @@ +rootProject.name = '@react-native-firebase_ml' diff --git a/packages/ml/android/src/main/AndroidManifest.xml b/packages/ml/android/src/main/AndroidManifest.xml new file mode 100644 index 0000000000..b7e0bbc379 --- /dev/null +++ b/packages/ml/android/src/main/AndroidManifest.xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java similarity index 98% rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java index f3be9b1690..dbc2475dbe 100644 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionCommon.java +++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLCommon.java @@ -1,10 +1,10 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; import com.google.firebase.ml.common.FirebaseMLException; import javax.annotation.Nullable; -class UniversalFirebaseMLVisionCommon { +class UniversalFirebaseMLCommon { static final String KEY_BOUNDING_BOX = "boundingBox"; static final String KEY_TEXT = "text"; static final String KEY_CONFIDENCE = "confidence"; diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java similarity index 96% rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java index 4139664db7..2ff78b56b5 100644 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionDocumentTextRecognizerModule.java +++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLDocumentTextRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -35,10 +35,10 @@ import javax.annotation.Nullable; import java.util.*; -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; +import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*; -class UniversalFirebaseMLVisionDocumentTextRecognizerModule extends UniversalFirebaseModule { - UniversalFirebaseMLVisionDocumentTextRecognizerModule(Context context, String serviceName) { +class UniversalFirebaseMLDocumentTextRecognizerModule extends UniversalFirebaseModule { + UniversalFirebaseMLDocumentTextRecognizerModule(Context context, String serviceName) { super(context, serviceName); } diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java similarity index 79% rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java rename to 
packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java index 786ba9cc6c..7d41db3a4f 100644 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionImageLabelerModule.java +++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLImageLabelerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -37,30 +37,13 @@ import java.util.List; import java.util.Map; -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; +import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*; -class UniversalFirebaseMLVisionImageLabelerModule extends UniversalFirebaseModule { - UniversalFirebaseMLVisionImageLabelerModule(Context context, String serviceName) { +class UniversalFirebaseMLImageLabelerModule extends UniversalFirebaseModule { + UniversalFirebaseMLImageLabelerModule(Context context, String serviceName) { super(context, serviceName); } - Task>> imageLabelerProcessImage(String appName, String stringUri, Bundle imageLabelerOptions) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseVisionOnDeviceImageLabelerOptions options = getOnDeviceImageLabelerOptions(imageLabelerOptions); - FirebaseVisionImageLabeler visionImageLabeler = FirebaseVision.getInstance(firebaseApp) - .getOnDeviceImageLabeler(options); - FirebaseVisionImage image = FirebaseVisionImage.fromFilePath( - getContext(), - SharedUtils.getUri(stringUri) - ); - - return processLabelerList( - Tasks.await(visionImageLabeler.processImage(image)) - ); - }); - } - Task>> cloudImageLabelerProcessImage(String appName, String stringUri, Bundle cloudImageLabelerOptions) { return Tasks.call(getExecutor(), () -> { FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); diff --git a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java similarity index 94% rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java index 6ec562271b..20fc33626b 100644 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionLandmarkRecognizerModule.java +++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLLandmarkRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -37,10 +37,10 @@ import java.util.List; import java.util.Map; -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; +import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*; -class UniversalFirebaseMLVisionLandmarkRecognizerModule extends UniversalFirebaseModule { - UniversalFirebaseMLVisionLandmarkRecognizerModule(Context context, String serviceName) { +class UniversalFirebaseMLLandmarkRecognizerModule extends UniversalFirebaseModule { + UniversalFirebaseMLLandmarkRecognizerModule(Context context, String serviceName) { super(context, serviceName); } diff --git 
a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java similarity index 89% rename from packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java rename to packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java index de16c5f550..8a84efe8f4 100644 --- a/packages/ml-vision/android/src/main/java/io/invertase/firebase/ml/vision/UniversalFirebaseMLVisionTextRecognizerModule.java +++ b/packages/ml/android/src/main/java/io/invertase/firebase/ml/UniversalFirebaseMLTextRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -34,32 +34,13 @@ import java.util.*; -import static io.invertase.firebase.ml.vision.UniversalFirebaseMLVisionCommon.*; +import static io.invertase.firebase.ml.UniversalFirebaseMLCommon.*; -class UniversalFirebaseMLVisionTextRecognizerModule extends UniversalFirebaseModule { - UniversalFirebaseMLVisionTextRecognizerModule(Context context, String serviceName) { +class UniversalFirebaseMLTextRecognizerModule extends UniversalFirebaseModule { + UniversalFirebaseMLTextRecognizerModule(Context context, String serviceName) { super(context, serviceName); } - Task> textRecognizerProcessImage( - String appName, - String stringUri - ) { - return Tasks.call(getExecutor(), () -> { - FirebaseApp firebaseApp = FirebaseApp.getInstance(appName); - FirebaseVisionTextRecognizer detector = FirebaseVision.getInstance(firebaseApp) - .getOnDeviceTextRecognizer(); - - FirebaseVisionImage image = FirebaseVisionImage.fromFilePath( - getContext(), - SharedUtils.getUri(stringUri) - ); - - FirebaseVisionText result = Tasks.await(detector.processImage(image)); - return getFirebaseVisionTextMap(result); - }); - } - Task> cloudTextRecognizerProcessImage( String appName, String stringUri, diff --git a/packages/ml/android/src/reactnative/AndroidManifest.xml b/packages/ml/android/src/reactnative/AndroidManifest.xml new file mode 100644 index 0000000000..d55b471c2c --- /dev/null +++ b/packages/ml/android/src/reactnative/AndroidManifest.xml @@ -0,0 +1,2 @@ + + diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java similarity index 71% rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java index b69ee5adbb..2bbfb583d0 100644 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionDocumentTextRecognizerModule.java +++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLDocumentTextRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -20,13 +20,13 @@ import com.facebook.react.bridge.*; import io.invertase.firebase.common.ReactNativeFirebaseModule; -public class 
RNFirebaseMLVisionDocumentTextRecognizerModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionDocumentTextRecognizer"; - private final UniversalFirebaseMLVisionDocumentTextRecognizerModule module; +public class RNFirebaseMLDocumentTextRecognizerModule extends ReactNativeFirebaseModule { + private static final String SERVICE_NAME = "MLDocumentTextRecognizer"; + private final UniversalFirebaseMLDocumentTextRecognizerModule module; - RNFirebaseMLVisionDocumentTextRecognizerModule(ReactApplicationContext reactContext) { + RNFirebaseMLDocumentTextRecognizerModule(ReactApplicationContext reactContext) { super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionDocumentTextRecognizerModule(reactContext, SERVICE_NAME); + this.module = new UniversalFirebaseMLDocumentTextRecognizerModule(reactContext, SERVICE_NAME); } @ReactMethod @@ -41,7 +41,7 @@ public void cloudDocumentTextRecognizerProcessImage( if (task.isSuccessful()) { promise.resolve(Arguments.makeNativeMap(task.getResult())); } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( + String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException( task.getException()); rejectPromiseWithCodeAndMessage( promise, diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java similarity index 54% rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java index cacba5626a..09733e56e7 100644 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionBarcodeDetectorModule.java +++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLImageLabelerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -20,23 +20,25 @@ import com.facebook.react.bridge.*; import io.invertase.firebase.common.ReactNativeFirebaseModule; -public class RNFirebaseMLVisionBarcodeDetectorModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionBarcodeDetector"; - private final UniversalFirebaseMLVisionBarcodeDetectorModule module; +public class RNFirebaseMLImageLabelerModule extends ReactNativeFirebaseModule { + private static final String SERVICE_NAME = "MLImageLabeler"; + private final UniversalFirebaseMLImageLabelerModule module; - RNFirebaseMLVisionBarcodeDetectorModule(ReactApplicationContext reactContext) { + RNFirebaseMLImageLabelerModule(ReactApplicationContext reactContext) { super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionBarcodeDetectorModule(reactContext, SERVICE_NAME); + this.module = new UniversalFirebaseMLImageLabelerModule(reactContext, SERVICE_NAME); } @ReactMethod - public void barcodeDetectorProcessImage(String appName, String stringUri, ReadableMap barcodeDetectorOptions, Promise promise) { - module.barcodeDetectorProcessImage(appName, stringUri, Arguments.toBundle(barcodeDetectorOptions)) - .addOnCompleteListener(getExecutor(), task -> { + public void cloudImageLabelerProcessImage(String 
appName, String stringUri, ReadableMap cloudImageLabelerOptions, Promise promise) { + this.module.cloudImageLabelerProcessImage(appName, stringUri, Arguments.toBundle(cloudImageLabelerOptions)) + .addOnCompleteListener(task -> { if (task.isSuccessful()) { - promise.resolve(Arguments.makeNativeArray(task.getResult())); + promise.resolve( + Arguments.makeNativeArray(task.getResult()) + ); } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( + String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException( task.getException()); rejectPromiseWithCodeAndMessage( promise, @@ -47,5 +49,4 @@ public void barcodeDetectorProcessImage(String appName, String stringUri, Readab } }); } - } diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java similarity index 72% rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java index 71e731c7db..37730a8788 100644 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionLandmarkRecognizerModule.java +++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLLandmarkRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -20,13 +20,13 @@ import com.facebook.react.bridge.*; import io.invertase.firebase.common.ReactNativeFirebaseModule; -public class RNFirebaseMLVisionLandmarkRecognizerModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionLandmarkRecognizer"; - private UniversalFirebaseMLVisionLandmarkRecognizerModule module; +public class RNFirebaseMLLandmarkRecognizerModule extends ReactNativeFirebaseModule { + private static final String SERVICE_NAME = "MLLandmarkRecognizer"; + private UniversalFirebaseMLLandmarkRecognizerModule module; - RNFirebaseMLVisionLandmarkRecognizerModule(ReactApplicationContext reactContext) { + RNFirebaseMLLandmarkRecognizerModule(ReactApplicationContext reactContext) { super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionLandmarkRecognizerModule(reactContext, SERVICE_NAME); + this.module = new UniversalFirebaseMLLandmarkRecognizerModule(reactContext, SERVICE_NAME); } @ReactMethod @@ -38,7 +38,7 @@ public void cloudLandmarkRecognizerProcessImage(String appName, String stringUri Arguments.makeNativeArray(task.getResult()) ); } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( + String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException( task.getException()); rejectPromiseWithCodeAndMessage( promise, diff --git a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java similarity index 59% rename from packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java rename to 
packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java index e2bafafa8d..11231a32dc 100644 --- a/packages/ml-vision/android/src/reactnative/java/io/invertase/firebase/ml/vision/RNFirebaseMLVisionFaceDetectorModule.java +++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/RNFirebaseMLTextRecognizerModule.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.vision; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -20,30 +20,28 @@ import com.facebook.react.bridge.*; import io.invertase.firebase.common.ReactNativeFirebaseModule; -public class RNFirebaseMLVisionFaceDetectorModule extends ReactNativeFirebaseModule { - private static final String SERVICE_NAME = "MLVisionFaceDetector"; - private final UniversalFirebaseMLVisionFaceDetectorModule module; +public class RNFirebaseMLTextRecognizerModule extends ReactNativeFirebaseModule { + private static final String SERVICE_NAME = "MLTextRecognizer"; + private final UniversalFirebaseMLTextRecognizerModule module; - RNFirebaseMLVisionFaceDetectorModule(ReactApplicationContext reactContext) { + RNFirebaseMLTextRecognizerModule(ReactApplicationContext reactContext) { super(reactContext, SERVICE_NAME); - this.module = new UniversalFirebaseMLVisionFaceDetectorModule(reactContext, SERVICE_NAME); + this.module = new UniversalFirebaseMLTextRecognizerModule(reactContext, SERVICE_NAME); } @ReactMethod - public void faceDetectorProcessImage( + public void cloudTextRecognizerProcessImage( String appName, String stringUri, - ReadableMap faceDetectorOptionsMap, + ReadableMap cloudTextRecognizerOptions, Promise promise ) { - module.faceDetectorProcessImage(appName, stringUri, Arguments.toBundle(faceDetectorOptionsMap)) + module.cloudTextRecognizerProcessImage(appName, stringUri, Arguments.toBundle(cloudTextRecognizerOptions)) .addOnCompleteListener(getExecutor(), task -> { if (task.isSuccessful()) { - promise.resolve( - Arguments.makeNativeArray(task.getResult()) - ); + promise.resolve(Arguments.makeNativeMap(task.getResult())); } else { - String[] errorCodeAndMessage = UniversalFirebaseMLVisionCommon.getErrorCodeAndMessageFromException( + String[] errorCodeAndMessage = UniversalFirebaseMLCommon.getErrorCodeAndMessageFromException( task.getException()); rejectPromiseWithCodeAndMessage( promise, @@ -54,5 +52,4 @@ public void faceDetectorProcessImage( } }); } - } diff --git a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java similarity index 63% rename from packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java rename to packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java index 4b2359d60a..268b8618c1 100644 --- a/packages/ml-natural-language/android/src/reactnative/java/io/invertase/firebase/ml/naturallanguage/ReactNativeFirebaseMLNaturalLanguagePackage.java +++ b/packages/ml/android/src/reactnative/java/io/invertase/firebase/ml/ReactNativeFirebaseMLPackage.java @@ -1,4 +1,4 @@ -package io.invertase.firebase.ml.naturallanguage; +package io.invertase.firebase.ml; /* * Copyright (c) 2016-present Invertase Limited & Contributors @@ -21,40 +21,23 @@ import com.facebook.react.bridge.NativeModule; import 
com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.uimanager.ViewManager; +import io.invertase.firebase.common.ReactNativeFirebaseJSON; +import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import javax.annotation.Nonnull; - -import io.invertase.firebase.common.ReactNativeFirebaseJSON; - @SuppressWarnings("unused") -public class ReactNativeFirebaseMLNaturalLanguagePackage implements ReactPackage { +public class ReactNativeFirebaseMLPackage implements ReactPackage { @Nonnull @Override public List createNativeModules(@Nonnull ReactApplicationContext reactContext) { List modules = new ArrayList<>(); - - if (ReactNativeFirebaseJSON - .getSharedInstance() - .getBooleanValue("ml_natural_language_language_id_model", false)) { - modules.add(new RNFirebaseMLNaturalLanguageIdModule(reactContext)); - } - - if (ReactNativeFirebaseJSON - .getSharedInstance() - .getBooleanValue("ml_natural_language_translate_model", false)) { - modules.add(new RNFirebaseMLNaturalLanguageTranslateModule(reactContext)); - } - - if (ReactNativeFirebaseJSON - .getSharedInstance() - .getBooleanValue("ml_natural_language_smart_reply_model", false)) { - modules.add(new RNFirebaseMLNaturalLanguageSmartReplyModule(reactContext)); - } - + modules.add(new RNFirebaseMLTextRecognizerModule(reactContext)); + modules.add(new RNFirebaseMLLandmarkRecognizerModule(reactContext)); + modules.add(new RNFirebaseMLDocumentTextRecognizerModule(reactContext)); + modules.add(new RNFirebaseMLImageLabelerModule(reactContext)); return modules; } diff --git a/packages/ml-vision/e2e/documentText.e2e.js b/packages/ml/e2e/documentText.e2e.js similarity index 83% rename from packages/ml-vision/e2e/documentText.e2e.js rename to packages/ml/e2e/documentText.e2e.js index 4be8f13122..be1bc4d28d 100644 --- a/packages/ml-vision/e2e/documentText.e2e.js +++ b/packages/ml/e2e/documentText.e2e.js @@ -42,7 +42,7 @@ function documentTextBaseElementValidate(documentTextBase) { let testImageFile; -describe('mlkit.vision.document.text', () => { +describe('ml.document.text', () => { before(async () => { testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/text.png`; await firebase @@ -51,10 +51,10 @@ describe('mlkit.vision.document.text', () => { .writeToFile(testImageFile); }); - describe('VisionCloudDocumentTextRecognizerOptions', () => { + describe('MLCloudDocumentTextRecognizerOptions', () => { it('throws if not an object', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, 'foo'); + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, 'foo'); return Promise.reject(new Error('Did not throw Error.')); } catch (e) { e.message.should.containEql( @@ -66,7 +66,7 @@ describe('mlkit.vision.document.text', () => { it('throws if enforceCertFingerprintMatch is not a boolean', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { enforceCertFingerprintMatch: 'true', }); return Promise.reject(new Error('Did not throw Error.')); @@ -79,14 +79,14 @@ describe('mlkit.vision.document.text', () => { }); it('sets enforceCertFingerprintMatch', async () => { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { enforceCertFingerprintMatch: false, }); }); it('throws if apiKeyOverride is 
not a string', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { apiKeyOverride: true, }); return Promise.reject(new Error('Did not throw Error.')); @@ -100,7 +100,7 @@ describe('mlkit.vision.document.text', () => { it('throws if languageHints is not an array', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { languageHints: 'en', }); return Promise.reject(new Error('Did not throw Error.')); @@ -114,7 +114,7 @@ describe('mlkit.vision.document.text', () => { it('throws if languageHints is empty array', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { languageHints: [], }); return Promise.reject(new Error('Did not throw Error.')); @@ -128,7 +128,7 @@ describe('mlkit.vision.document.text', () => { it('throws if languageHints contains non-string', async () => { try { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { languageHints: [123], }); return Promise.reject(new Error('Did not throw Error.')); @@ -141,7 +141,7 @@ describe('mlkit.vision.document.text', () => { }); it('sets hinted languages', async () => { - await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile, { + await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile, { languageHints: ['fr'], }); }); @@ -150,7 +150,7 @@ describe('mlkit.vision.document.text', () => { describe('cloudDocumentTextRecognizerProcessImage()', () => { it('should throw if image path is not a string', () => { try { - firebase.vision().cloudDocumentTextRecognizerProcessImage(123); + firebase.ml().cloudDocumentTextRecognizerProcessImage(123); return Promise.reject(new Error('Did not throw an Error.')); } catch (error) { error.message.should.containEql("'localImageFilePath' expected a string local file path"); @@ -158,8 +158,8 @@ describe('mlkit.vision.document.text', () => { } }); - it('should return a VisionDocumentText representation for an image', async () => { - const res = await firebase.vision().cloudDocumentTextRecognizerProcessImage(testImageFile); + it('should return a MLDocumentText representation for an image', async () => { + const res = await firebase.ml().cloudDocumentTextRecognizerProcessImage(testImageFile); res.text.should.be.a.String(); res.blocks.should.be.an.Array(); diff --git a/packages/ml-vision/e2e/label.e2e.js b/packages/ml/e2e/label.e2e.js similarity index 51% rename from packages/ml-vision/e2e/label.e2e.js rename to packages/ml/e2e/label.e2e.js index d305c41f1f..7b7c39ccb9 100644 --- a/packages/ml-vision/e2e/label.e2e.js +++ b/packages/ml/e2e/label.e2e.js @@ -17,7 +17,7 @@ let testImageFile; -describe('mlkit.vision.label', () => { +describe('ml.label', () => { before(async () => { testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/crab.jpg`; await firebase @@ -26,35 +26,10 @@ describe('mlkit.vision.label', () => { .writeToFile(testImageFile); }); - describe('imageLabelerProcessImage()', () => { - it('should throw if image path is not a string', () => { - try { - firebase.vision().imageLabelerProcessImage(123); - return Promise.reject(new Error('Did not throw an Error.')); - } catch 
diff --git a/packages/ml-vision/e2e/label.e2e.js b/packages/ml/e2e/label.e2e.js
similarity index 51%
rename from packages/ml-vision/e2e/label.e2e.js
rename to packages/ml/e2e/label.e2e.js
index d305c41f1f..7b7c39ccb9 100644
--- a/packages/ml-vision/e2e/label.e2e.js
+++ b/packages/ml/e2e/label.e2e.js
@@ -17,7 +17,7 @@
 
 let testImageFile;
 
-describe('mlkit.vision.label', () => {
+describe('ml.label', () => {
   before(async () => {
     testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/crab.jpg`;
     await firebase
@@ -26,35 +26,10 @@ describe('mlkit.vision.label', () => {
       .writeToFile(testImageFile);
   });
 
-  describe('imageLabelerProcessImage()', () => {
-    it('should throw if image path is not a string', () => {
-      try {
-        firebase.vision().imageLabelerProcessImage(123);
-        return Promise.reject(new Error('Did not throw an Error.'));
-      } catch (error) {
-        error.message.should.containEql("'localImageFilePath' expected a string local file path");
-        return Promise.resolve();
-      }
-    });
-
-    it('should return a local label array', async () => {
-      const res = await firebase.vision().imageLabelerProcessImage(testImageFile);
-
-      res.should.be.Array();
-      res.length.should.be.greaterThan(0);
-
-      res.forEach(i => {
-        i.text.should.be.String();
-        i.entityId.should.be.String();
-        i.confidence.should.be.Number();
-      });
-    });
-  });
-
   describe('cloudImageLabelerProcessImage()', () => {
     it('should throw if image path is not a string', () => {
       try {
-        firebase.vision().cloudImageLabelerProcessImage(123);
+        firebase.ml().cloudImageLabelerProcessImage(123);
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -62,65 +37,8 @@ describe('mlkit.vision.label', () => {
       }
     });
 
-    it('should return a cloud label array', async () => {
-      const res = await firebase.vision().cloudImageLabelerProcessImage(testImageFile);
-
-      res.should.be.Array();
-      res.length.should.be.greaterThan(0);
-
-      res.forEach(i => {
-        i.text.should.be.String();
-        i.entityId.should.be.String();
-        i.confidence.should.be.Number();
-      });
-    });
-  });
-
-  describe('VisionImageLabelerOptions', () => {
-    it('throws if not an object', async () => {
-      try {
-        await firebase.vision().imageLabelerProcessImage(testImageFile, '123');
-        return Promise.reject(new Error('Did not throw an Error.'));
-      } catch (error) {
-        error.message.should.containEql("'imageLabelerOptions' expected an object value");
-        return Promise.resolve();
-      }
-    });
-
-    describe('confidenceThreshold', () => {
-      it('should throw if confidence threshold is not a number', async () => {
-        try {
-          await firebase.vision().imageLabelerProcessImage(testImageFile, {
-            confidenceThreshold: '0.5',
-          });
-          return Promise.reject(new Error('Did not throw an Error.'));
-        } catch (error) {
-          error.message.should.containEql(
-            "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1",
-          );
-          return Promise.resolve();
-        }
-      });
-    });
-
-    it('should throw if confidence threshold is not between 0 & 1', async () => {
-      try {
-        await firebase.vision().imageLabelerProcessImage(testImageFile, {
-          confidenceThreshold: -0.2,
-        });
-        return Promise.reject(new Error('Did not throw an Error.'));
-      } catch (error) {
-        error.message.should.containEql(
-          "'imageLabelerOptions.confidenceThreshold' expected a number value between 0 & 1",
-        );
-        return Promise.resolve();
-      }
-    });
-
-    it('should accept options and return local labels', async () => {
-      const res = await firebase.vision().imageLabelerProcessImage(testImageFile, {
-        confidenceThreshold: 0.8,
-      });
+    xit('should return a cloud label array', async () => {
+      const res = await firebase.ml().cloudImageLabelerProcessImage(testImageFile);
 
       res.should.be.Array();
       res.length.should.be.greaterThan(0);
@@ -133,10 +51,10 @@ describe('mlkit.vision.label', () => {
     });
   });
 
-  describe('VisionCloudImageLabelerOptions', () => {
+  describe('MLCloudImageLabelerOptions', () => {
     it('throws if not an object', async () => {
       try {
-        await firebase.vision().cloudImageLabelerProcessImage(testImageFile, '123');
+        await firebase.ml().cloudImageLabelerProcessImage(testImageFile, '123');
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql("'cloudImageLabelerOptions' expected an object value");
@@ -147,7 +65,7 @@ describe('mlkit.vision.label', () => {
     describe('confidenceThreshold', () => {
       it('should throw if confidence threshold is not a number', async () => {
         try {
-          await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+          await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
            confidenceThreshold: '0.2',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -161,7 +79,7 @@ describe('mlkit.vision.label', () => {
 
       it('should throw if confidence threshold is not between 0 & 1', async () => {
         try {
-          await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+          await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
            confidenceThreshold: 1.1,
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -173,8 +91,8 @@ describe('mlkit.vision.label', () => {
         }
       });
 
-      it('should accept options and return cloud labels', async () => {
-        const res = await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+      xit('should accept options and return cloud labels', async () => {
+        const res = await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
          confidenceThreshold: 0.8,
         });
 
@@ -192,7 +110,7 @@ describe('mlkit.vision.label', () => {
     describe('enforceCertFingerprintMatch', () => {
       it('throws if not a boolean', async () => {
         try {
-          await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+          await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
            enforceCertFingerprintMatch: 'true',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -204,17 +122,17 @@ describe('mlkit.vision.label', () => {
         }
       });
 
-      it('sets enforceCertFingerprintMatch', async () => {
-        await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+      xit('sets enforceCertFingerprintMatch', async () => {
+        await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
          enforceCertFingerprintMatch: false,
         });
       });
     });
 
-    describe('apiKeyOverride', () => {
+    xdescribe('apiKeyOverride', () => {
       it('throws if apiKeyOverride is not a string', async () => {
         try {
-          await firebase.vision().cloudImageLabelerProcessImage(testImageFile, {
+          await firebase.ml().cloudImageLabelerProcessImage(testImageFile, {
            apiKeyOverride: true,
           });
           return Promise.reject(new Error('Did not throw Error.'));
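Note that this rename also drops the on-device `imageLabelerProcessImage()` path entirely; only the cloud labeler survives, and the network-dependent assertions are parked with `xit`/`xdescribe`. A minimal sketch of the remaining call, assuming a valid local image path:

```js
import ml from '@react-native-firebase/ml';

// confidenceThreshold must be a number between 0 & 1 (defaults to 0.5);
// each returned label exposes text, entityId and confidence.
async function labelImage(localImageFilePath) {
  const labels = await ml().cloudImageLabelerProcessImage(localImageFilePath, {
    confidenceThreshold: 0.8,
  });
  labels.forEach(label => console.log(label.text, label.entityId, label.confidence));
}
```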
diff --git a/packages/ml-vision/e2e/landmark.e2e.js b/packages/ml/e2e/landmark.e2e.js
similarity index 71%
rename from packages/ml-vision/e2e/landmark.e2e.js
rename to packages/ml/e2e/landmark.e2e.js
index fbe6b94301..1d053c7f60 100644
--- a/packages/ml-vision/e2e/landmark.e2e.js
+++ b/packages/ml/e2e/landmark.e2e.js
@@ -16,7 +16,7 @@
  */
 let testImageFile;
 
-describe('mlkit.vision.landmark', () => {
+describe('ml.landmark', () => {
   before(async () => {
     testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/landmark.jpg`;
     await firebase
@@ -28,7 +28,7 @@ describe('mlkit.vision.landmark', () => {
   describe('cloudLandmarkRecognizerProcessImage()', () => {
     it('should throw if image path is not a string', () => {
       try {
-        firebase.vision().cloudLandmarkRecognizerProcessImage(123);
+        firebase.ml().cloudLandmarkRecognizerProcessImage(123);
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -36,8 +36,8 @@ describe('mlkit.vision.landmark', () => {
       }
     });
 
-    it('should return an array of landmark information', async () => {
-      const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile);
+    xit('should return an array of landmark information', async () => {
+      const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile);
 
       res.should.be.Array();
       res.length.should.be.greaterThan(0);
@@ -59,10 +59,10 @@ describe('mlkit.vision.landmark', () => {
     });
   });
 
-  describe('VisionCloudLandmarkRecognizerOptions', () => {
+  describe('MLCloudLandmarkRecognizerOptions', () => {
     it('throws if not an object', async () => {
       try {
-        await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, '123');
+        await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, '123');
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql(
@@ -75,7 +75,7 @@ describe('mlkit.vision.landmark', () => {
     describe('cloudLandmarkRecognizerOptions', () => {
       it('throws if not a boolean', async () => {
         try {
-          await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
            enforceCertFingerprintMatch: 'false',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -87,15 +87,15 @@ describe('mlkit.vision.landmark', () => {
         }
       });
 
-      it('sets cloudLandmarkRecognizerOptions', async () => {
-        await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+      xit('sets cloudLandmarkRecognizerOptions', async () => {
+        await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
          enforceCertFingerprintMatch: false,
         });
       });
 
       it('throws if apiKeyOverride is not a string', async () => {
         try {
-          await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
            apiKeyOverride: true,
           });
           return Promise.reject(new Error('Did not throw Error.'));
@@ -108,10 +108,10 @@ describe('mlkit.vision.landmark', () => {
       });
     });
     // TODO temporarily disable test suite - is flakey on CI - needs investigating
-    xdescribe('maxResults', () => {
+    describe('maxResults', () => {
       it('throws if maxResults is not a number', async () => {
         try {
-          await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
            maxResults: '2',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -123,8 +123,8 @@ describe('mlkit.vision.landmark', () => {
         }
       });
 
-      it('limits the maximum results', async () => {
-        const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+      xit('limits the maximum results', async () => {
+        const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
          maxResults: 3,
         });
 
@@ -138,7 +138,7 @@ describe('mlkit.vision.landmark', () => {
     describe('modelType', () => {
       it('throws if model is invalid', async () => {
         try {
-          await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
            modelType: 3,
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -150,19 +150,19 @@ describe('mlkit.vision.landmark', () => {
         }
       });
 
-      it('sets modelType', async () => {
-        await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
-          modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.STABLE_MODEL,
+      xit('sets modelType', async () => {
+        await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.STABLE_MODEL,
         });
 
-        await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
-          modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
+        await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL,
         });
       });
 
-      it('uses a latest model', async () => {
-        const res = await firebase.vision().cloudLandmarkRecognizerProcessImage(testImageFile, {
-          modelType: firebase.vision.VisionCloudLandmarkRecognizerModelType.LATEST_MODEL,
+      xit('uses a latest model', async () => {
+        const res = await firebase.ml().cloudLandmarkRecognizerProcessImage(testImageFile, {
+          modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL,
         });
         res.should.be.Array();
       });
diff --git a/packages/ml-vision/e2e/mlKitVision.e2e.js b/packages/ml/e2e/ml.e2e.js
similarity index 75%
rename from packages/ml-vision/e2e/mlKitVision.e2e.js
rename to packages/ml/e2e/ml.e2e.js
index ca938d3935..5f19eee1a6 100644
--- a/packages/ml-vision/e2e/mlKitVision.e2e.js
+++ b/packages/ml/e2e/ml.e2e.js
@@ -15,24 +15,22 @@
  *
  */
 
-describe('vision()', () => {
+describe('ml()', () => {
   describe('namespace', () => {
     it('accessible from firebase.app()', () => {
       const app = firebase.app();
-      should.exist(app.vision);
-      app.vision().app.should.equal(app);
+      should.exist(app.ml);
+      app.ml().app.should.equal(app);
     });
 
     it('supports multiple apps', async () => {
-      firebase.vision().app.name.should.equal('[DEFAULT]');
+      firebase.ml().app.name.should.equal('[DEFAULT]');
 
-      firebase
-        .vision(firebase.app('secondaryFromNative'))
-        .app.name.should.equal('secondaryFromNative');
+      firebase.ml(firebase.app('secondaryFromNative')).app.name.should.equal('secondaryFromNative');
 
       firebase
         .app('secondaryFromNative')
-        .vision()
+        .ml()
         .app.name.should.equal('secondaryFromNative');
     });
   });
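The namespace test above pins down the multi-app behaviour of the renamed module; sketched as application code (the `secondaryFromNative` app name comes from the test harness, not something an app would normally define):

```js
import { firebase } from '@react-native-firebase/ml';

const defaultMl = firebase.ml(); // bound to the '[DEFAULT]' app
const secondaryMl = firebase.ml(firebase.app('secondaryFromNative')); // explicit secondary app
console.log(defaultMl.app.name, secondaryMl.app.name);
```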
diff --git a/packages/ml-vision/e2e/text.e2e.js b/packages/ml/e2e/text.e2e.js
similarity index 71%
rename from packages/ml-vision/e2e/text.e2e.js
rename to packages/ml/e2e/text.e2e.js
index 13c9da73ff..73ddd02864 100644
--- a/packages/ml-vision/e2e/text.e2e.js
+++ b/packages/ml/e2e/text.e2e.js
@@ -69,8 +69,7 @@ function textBaseElementValidate(textBase, cloud = false) {
 
 let testImageFile;
 
-// TODO allow android testing once ML model download manager support implemented
-ios.describe('mlkit.vision.text', () => {
+describe('ml.text', () => {
   before(async () => {
     testImageFile = `${firebase.utils.FilePath.DOCUMENT_DIRECTORY}/text.png`;
     await firebase
@@ -79,43 +78,10 @@ ios.describe('mlkit.vision.text', () => {
       .writeToFile(testImageFile);
   });
 
-  describe('textRecognizerProcessImage()', () => {
-    it('should throw if image path is not a string', () => {
-      try {
-        firebase.vision().textRecognizerProcessImage(123);
-        return Promise.reject(new Error('Did not throw an Error.'));
-      } catch (error) {
-        error.message.should.containEql("'localImageFilePath' expected a string local file path");
-        return Promise.resolve();
-      }
-    });
-
-    it('should return a VisionText representation for an image', async () => {
-      const res = await firebase.vision().textRecognizerProcessImage(testImageFile);
-      res.text.should.be.a.String();
-      res.blocks.should.be.an.Array();
-      res.blocks.length.should.be.greaterThan(0);
-
-      res.blocks.forEach(textBlock => {
-        textBaseElementValidate(textBlock);
-        textBlock.lines.should.be.an.Array();
-        textBlock.lines.length.should.be.greaterThan(0);
-        textBlock.lines.forEach(line => {
-          textBaseElementValidate(line);
-          line.elements.should.be.an.Array();
-          line.elements.length.should.be.greaterThan(0);
-          line.elements.forEach(element => {
-            textBaseElementValidate(element);
-          });
-        });
-      });
-    });
-  });
-
-  describe('VisionCloudTextRecognizerOptions', () => {
+  describe('MLCloudTextRecognizerOptions', () => {
     it('throws if not an object', async () => {
       try {
-        await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, '123');
+        await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, '123');
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql("'cloudTextRecognizerOptions' expected an object value");
@@ -126,7 +92,7 @@ ios.describe('mlkit.vision.text', () => {
     describe('enforceCertFingerprintMatch', () => {
       it('throws if not a boolean', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            enforceCertFingerprintMatch: 'false',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -139,7 +105,7 @@ ios.describe('mlkit.vision.text', () => {
       });
 
       it('sets a value', async () => {
-        await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+        await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
          enforceCertFingerprintMatch: false,
         });
       });
@@ -148,7 +114,7 @@ ios.describe('mlkit.vision.text', () => {
     describe('apiKeyOverride', () => {
       it('throws if apiKeyOverride is not a string', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            apiKeyOverride: true,
           });
           return Promise.reject(new Error('Did not throw Error.'));
@@ -164,7 +130,7 @@ ios.describe('mlkit.vision.text', () => {
     describe('languageHints', () => {
       it('throws if not array', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            languageHints: 'en',
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -178,7 +144,7 @@ ios.describe('mlkit.vision.text', () => {
 
       it('throws if empty array', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            languageHints: [],
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -192,7 +158,7 @@ ios.describe('mlkit.vision.text', () => {
 
       it('throws if array contains non-string values', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            languageHints: [123],
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -205,7 +171,7 @@ ios.describe('mlkit.vision.text', () => {
       });
 
       it('sets hintedLanguages', async () => {
-        await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+        await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
          languageHints: ['fr'],
         });
       });
@@ -214,7 +180,7 @@ ios.describe('mlkit.vision.text', () => {
     describe('modelType', () => {
       it('throws if invalid type', async () => {
         try {
-          await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
+          await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
            modelType: 7,
           });
           return Promise.reject(new Error('Did not throw an Error.'));
@@ -225,12 +191,8 @@ ios.describe('mlkit.vision.text', () => {
       });
 
       it('sets modelType', async () => {
-        await firebase.vision().cloudTextRecognizerProcessImage(testImageFile, {
-          modelType: firebase.vision.VisionCloudTextRecognizerModelType.SPARSE_MODEL,
-        });
-
-        await firebase.vision().textRecognizerProcessImage(testImageFile, {
-          modelType: firebase.vision.VisionCloudTextRecognizerModelType.DENSE_MODEL,
+        await firebase.ml().cloudTextRecognizerProcessImage(testImageFile, {
+          modelType: firebase.ml.MLCloudTextRecognizerModelType.SPARSE_MODEL,
         });
       });
     });
@@ -239,7 +201,7 @@ ios.describe('mlkit.vision.text', () => {
   describe('cloudTextRecognizerProcessImage()', () => {
     it('should throw if image path is not a string', () => {
       try {
-        firebase.vision().cloudTextRecognizerProcessImage(123);
+        firebase.ml().cloudTextRecognizerProcessImage(123);
         return Promise.reject(new Error('Did not throw an Error.'));
       } catch (error) {
         error.message.should.containEql("'localImageFilePath' expected a string local file path");
@@ -248,7 +210,7 @@ ios.describe('mlkit.vision.text', () => {
     });
 
-    it('should return a VisionText representation for an image', async () => {
-      const res = await firebase.vision().cloudTextRecognizerProcessImage(testImageFile);
+    it('should return an MLText representation for an image', async () => {
+      const res = await firebase.ml().cloudTextRecognizerProcessImage(testImageFile);
       res.text.should.be.a.String();
       res.blocks.should.be.an.Array();
       res.blocks.length.should.be.greaterThan(0);
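The text recognizer result keeps its nested shape through the rename: blocks contain lines, lines contain elements, and each level carries `text`, `confidence`, `boundingBox`, `cornerPoints` and `recognizedLanguages`. A hedged traversal sketch, assuming a valid local image path:

```js
import { firebase } from '@react-native-firebase/ml';

async function logRecognizedText(localImageFilePath) {
  const result = await firebase.ml().cloudTextRecognizerProcessImage(localImageFilePath, {
    modelType: firebase.ml.MLCloudTextRecognizerModelType.SPARSE_MODEL, // the default
  });
  result.blocks.forEach(block =>
    block.lines.forEach(line =>
      line.elements.forEach(element => console.log(element.text, element.confidence)),
    ),
  );
}
```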
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj b/packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
similarity index 66%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj
rename to packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
index f99e91f0d0..ab76410fa2 100644
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.pbxproj
+++ b/packages/ml/ios/RNFBML.xcodeproj/project.pbxproj
@@ -7,11 +7,11 @@
 	objects = {
 
 /* Begin PBXBuildFile section */
-		27038A8322A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m in Sources */ = {isa = PBXBuildFile; fileRef = 27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */; };
-		2744B98621F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */; };
-		27760EA1229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */; };
-		27760EA4229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */; };
-		27760EA722A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m in Sources */ = {isa = PBXBuildFile; fileRef = 27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */; };
+		8B06D3F322F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */; };
+		8B06D3FC22F863AE00A5B542 /* RNFBMLCommon.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */; };
+		8B06D40622F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */; };
+		8B06D40A22F989EF00A5B542 /* RNFBMLTextRecognizerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */; };
+		8B06D40E22F99DF900A5B542 /* RNFBMLImageLabelerModule.m in Sources */ = {isa = PBXBuildFile; fileRef = 8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXCopyFilesBuildPhase section */
@@ -27,17 +27,17 @@
 /* End PBXCopyFilesBuildPhase section */
 
 /* Begin PBXFileReference section */
-		27038A8122A16C31001E082B /* RCTConvert+FIRLanguageIdentificationOptions.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "RCTConvert+FIRLanguageIdentificationOptions.h"; sourceTree = "<group>"; };
-		27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = "RCTConvert+FIRLanguageIdentificationOptions.m"; sourceTree = "<group>"; };
-		2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBMLNaturalLanguage.a; sourceTree = BUILT_PRODUCTS_DIR; };
-		2744B98421F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = RNFBMLNaturalLanguageIdModule.h; path = RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h; sourceTree = SOURCE_ROOT; };
-		2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; name = RNFBMLNaturalLanguageIdModule.m; path = RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.m; sourceTree = SOURCE_ROOT; };
-		27760E9F229ED5B400F5F127 /* RNFBMLNaturalLanguageTranslateModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLNaturalLanguageTranslateModule.h; sourceTree = "<group>"; };
-		27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLNaturalLanguageTranslateModule.m; sourceTree = "<group>"; };
-		27760EA2229ED5F600F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLNaturalLanguageSmartReplyModule.h; sourceTree = "<group>"; };
-		27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLNaturalLanguageSmartReplyModule.m; sourceTree = "<group>"; };
-		27760EA522A0064100F5F127 /* RCTConvert+FIRTextMessageArray.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "RCTConvert+FIRTextMessageArray.h"; path = "RNFBMLNaturalLanguage/RCTConvert+FIRTextMessageArray.h"; sourceTree = SOURCE_ROOT; };
-		27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = "RCTConvert+FIRTextMessageArray.m"; sourceTree = "<group>"; };
+		2744B98221F45429004F8E3F /* libRNFBML.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libRNFBML.a; sourceTree = BUILT_PRODUCTS_DIR; };
+		8B06D3F122F84F6500A5B542 /* RNFBMLLandmarkRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLLandmarkRecognizerModule.h; sourceTree = "<group>"; };
+		8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLLandmarkRecognizerModule.m; sourceTree = "<group>"; };
+		8B06D3FA22F863A400A5B542 /* RNFBMLCommon.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLCommon.h; sourceTree = "<group>"; };
+		8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLCommon.m; sourceTree = "<group>"; };
+		8B06D40422F97B3600A5B542 /* RNFBMLDocumentTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLDocumentTextRecognizerModule.h; sourceTree = "<group>"; };
+		8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLDocumentTextRecognizerModule.m; sourceTree = "<group>"; };
+		8B06D40822F989E400A5B542 /* RNFBMLTextRecognizerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLTextRecognizerModule.h; sourceTree = "<group>"; };
+		8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLTextRecognizerModule.m; sourceTree = "<group>"; };
+		8B06D40C22F99DEF00A5B542 /* RNFBMLImageLabelerModule.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RNFBMLImageLabelerModule.h; sourceTree = "<group>"; };
+		8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RNFBMLImageLabelerModule.m; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -54,32 +54,32 @@
 		2744B97521F452B8004F8E3F /* Products */ = {
 			isa = PBXGroup;
 			children = (
-				2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */,
+				2744B98221F45429004F8E3F /* libRNFBML.a */,
 			);
 			name = Products;
 			sourceTree = "<group>";
 		};
-		2744B98321F45429004F8E3F /* RNFBMLNaturalLanguage */ = {
+		2744B98321F45429004F8E3F /* RNFBML */ = {
 			isa = PBXGroup;
 			children = (
-				27760EA522A0064100F5F127 /* RCTConvert+FIRTextMessageArray.h */,
-				27760EA622A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m */,
-				2744B98421F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.h */,
-				2744B98521F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m */,
-				27760E9F229ED5B400F5F127 /* RNFBMLNaturalLanguageTranslateModule.h */,
-				27760EA0229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m */,
-				27760EA2229ED5F600F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.h */,
-				27760EA3229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m */,
-				27038A8122A16C31001E082B /* RCTConvert+FIRLanguageIdentificationOptions.h */,
-				27038A8222A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m */,
+				8B06D3F122F84F6500A5B542 /* RNFBMLLandmarkRecognizerModule.h */,
+				8B06D3F222F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m */,
+				8B06D3FA22F863A400A5B542 /* RNFBMLCommon.h */,
+				8B06D3FB22F863AE00A5B542 /* RNFBMLCommon.m */,
+				8B06D40422F97B3600A5B542 /* RNFBMLDocumentTextRecognizerModule.h */,
+				8B06D40522F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m */,
+				8B06D40822F989E400A5B542 /* RNFBMLTextRecognizerModule.h */,
+				8B06D40922F989EF00A5B542 /* RNFBMLTextRecognizerModule.m */,
+				8B06D40C22F99DEF00A5B542 /* RNFBMLImageLabelerModule.h */,
+				8B06D40D22F99DF900A5B542 /* RNFBMLImageLabelerModule.m */,
 			);
-			path = RNFBMLNaturalLanguage;
+			path = RNFBML;
 			sourceTree = "<group>";
 		};
 		3323F52AAFE26B7384BE4DE3 = {
 			isa = PBXGroup;
 			children = (
-				2744B98321F45429004F8E3F /* RNFBMLNaturalLanguage */,
+				2744B98321F45429004F8E3F /* RNFBML */,
 				2744B97521F452B8004F8E3F /* Products */,
 			);
 			sourceTree = "<group>";
@@ -87,9 +87,9 @@
 /* End PBXGroup section */
 
 /* Begin PBXNativeTarget section */
-		2744B98121F45429004F8E3F /* RNFBMLNaturalLanguage */ = {
+		2744B98121F45429004F8E3F /* RNFBML */ = {
 			isa = PBXNativeTarget;
-			buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLNaturalLanguage" */;
+			buildConfigurationList = 2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBML" */;
 			buildPhases = (
 				2744B97E21F45429004F8E3F /* Sources */,
 				2744B97F21F45429004F8E3F /* Frameworks */,
@@ -99,9 +99,9 @@
 			);
 			dependencies = (
 			);
-			name = RNFBMLNaturalLanguage;
-			productName = RNFBMLNaturalLanguage;
-			productReference = 2744B98221F45429004F8E3F /* libRNFBMLNaturalLanguage.a */;
+			name = RNFBML;
+			productName = RNFBML;
+			productReference = 2744B98221F45429004F8E3F /* libRNFBML.a */;
 			productType = "com.apple.product-type.library.static";
 		};
 /* End PBXNativeTarget section */
@@ -110,7 +110,7 @@
 		3323F95273A95DB34F55C6D7 /* Project object */ = {
 			isa = PBXProject;
 			attributes = {
-				CLASSPREFIX = RNFBMLNaturalLanguage;
+				CLASSPREFIX = RNFBML;
 				LastUpgradeCheck = 1010;
 				ORGANIZATIONNAME = Invertase;
 				TargetAttributes = {
@@ -120,11 +120,12 @@
 				};
 			};
 		};
-			buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLNaturalLanguage" */;
+			buildConfigurationList = 3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBML" */;
 			compatibilityVersion = "Xcode 8.0";
 			developmentRegion = English;
 			hasScannedForEncodings = 0;
 			knownRegions = (
+				English,
 				en,
 			);
 			mainGroup = 3323F52AAFE26B7384BE4DE3;
@@ -132,7 +133,7 @@
 			projectDirPath = "";
 			projectRoot = "";
 			targets = (
-				2744B98121F45429004F8E3F /* RNFBMLNaturalLanguage */,
+				2744B98121F45429004F8E3F /* RNFBML */,
 			);
 		};
 /* End PBXProject section */
@@ -142,11 +143,11 @@
 			isa = PBXSourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
-				27760EA4229ED74E00F5F127 /* RNFBMLNaturalLanguageSmartReplyModule.m in Sources */,
-				2744B98621F45429004F8E3F /* RNFBMLNaturalLanguageIdModule.m in Sources */,
-				27038A8322A16C43001E082B /* RCTConvert+FIRLanguageIdentificationOptions.m in Sources */,
-				27760EA722A0064100F5F127 /* RCTConvert+FIRTextMessageArray.m in Sources */,
-				27760EA1229ED5D000F5F127 /* RNFBMLNaturalLanguageTranslateModule.m in Sources */,
+				8B06D40E22F99DF900A5B542 /* RNFBMLImageLabelerModule.m in Sources */,
+				8B06D40622F97B4900A5B542 /* RNFBMLDocumentTextRecognizerModule.m in Sources */,
+				8B06D40A22F989EF00A5B542 /* RNFBMLTextRecognizerModule.m in Sources */,
+				8B06D3F322F84F7200A5B542 /* RNFBMLLandmarkRecognizerModule.m in Sources */,
+				8B06D3FC22F863AE00A5B542 /* RNFBMLCommon.m in Sources */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
@@ -347,7 +348,7 @@
 /* End XCBuildConfiguration section */
 
 /* Begin XCConfigurationList section */
-		2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBMLNaturalLanguage" */ = {
+		2744B98821F45429004F8E3F /* Build configuration list for PBXNativeTarget "RNFBML" */ = {
 			isa = XCConfigurationList;
 			buildConfigurations = (
 				2744B98921F45429004F8E3F /* Debug */,
@@ -356,7 +357,7 @@
 			defaultConfigurationIsVisible = 0;
 			defaultConfigurationName = Release;
 		};
-		3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBMLNaturalLanguage" */ = {
+		3323F1C5716BA966BBBB95A4 /* Build configuration list for PBXProject "RNFBML" */ = {
 			isa = XCConfigurationList;
 			buildConfigurations = (
 				3323F7E33E1559A2B9826720 /* Debug */,
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/contents.xcworkspacedata
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/contents.xcworkspacedata
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/contents.xcworkspacedata
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings b/packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
rename to packages/ml/ios/RNFBML.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/xcshareddata/IDETemplateMacros.plist b/packages/ml/ios/RNFBML.xcodeproj/xcshareddata/IDETemplateMacros.plist
similarity index 100%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage.xcodeproj/xcshareddata/IDETemplateMacros.plist
rename to packages/ml/ios/RNFBML.xcodeproj/xcshareddata/IDETemplateMacros.plist
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h b/packages/ml/ios/RNFBML/RNFBMLCommon.h
similarity index 75%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h
rename to packages/ml/ios/RNFBML/RNFBMLCommon.h
index e55df485d9..afbe07d173 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionCommon.h
+++ b/packages/ml/ios/RNFBML/RNFBMLCommon.h
@@ -15,15 +15,11 @@
  */
 
-@interface RNFBMLVisionCommon : NSObject
+@interface RNFBMLCommon : NSObject
 
 + (NSArray *)rectToIntArray:(CGRect)rect;
 
-+ (NSDictionary *)contourToDict:(FIRVisionFaceContour *)visionFaceContour;
-
-+ (NSDictionary *)landmarkToDict:(FIRVisionFaceLandmark *)visionFaceLandmark;
-
-+ (NSArray *)visionPointsToArray:(NSArray *_Nullable)points;
++ (NSArray *)pointsToArray:(NSArray *_Nullable)points;
 
 + (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)(
     NSArray *errorCodeMessageArray,
     UIImage *image
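For reference when reading the JS-side results: `rectToIntArray` (declared above, implemented in the new `RNFBMLCommon.m` below) encodes a `CGRect` as `[left, top, right, bottom]`, and `pointsToArray` encodes each corner point as `[x, y]`. A small illustrative decoder, not code from the diff:

```js
// Converts the native boundingBox encoding back into an origin/size rect.
function boundingBoxToRect([left, top, right, bottom]) {
  return { x: left, y: top, width: right - left, height: bottom - top };
}
```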
diff --git a/packages/ml/ios/RNFBML/RNFBMLCommon.m b/packages/ml/ios/RNFBML/RNFBMLCommon.m
new file mode 100644
index 0000000000..a24faea984
--- /dev/null
+++ b/packages/ml/ios/RNFBML/RNFBMLCommon.m
@@ -0,0 +1,65 @@
+/**
+ * Copyright (c) 2016-present Invertase Limited & Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this library except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+#import <Firebase/Firebase.h>
+#import <React/RCTConvert.h>
+#import "RNFBMLCommon.h"
+
+@implementation RNFBMLCommon
+
++ (NSArray *)rectToIntArray:(CGRect)rect {
+  CGSize size = rect.size;
+  CGPoint point = rect.origin;
+  return @[@(point.x), @(point.y), @(point.x + size.width), @(point.y + size.height)];
+}
+
++ (NSArray *)pointsToArray:(NSArray *_Nullable)points {
+  if (points == nil) {
+    return @[];
+  }
+
+  NSMutableArray *pointsArray = [[NSMutableArray alloc] init];
+  for (NSValue *point in points) {
+    [pointsArray addObject:[self arrayForCGPoint:point.CGPointValue]];
+  }
+
+  return pointsArray;
+}
+
++ (NSArray *)arrayForCGPoint:(CGPoint)point {
+  return @[@(point.x), @(point.y)];
+}
+
++ (NSArray *)arrayForFIRVisionPoint:(FIRVisionPoint *)point {
+  return @[point.x, point.y];
+}
+
++ (void)UIImageForFilePath:(NSString *)localFilePath completion:(void (^)(
+    NSArray *errorCodeMessageArray,
+    UIImage *image
+))completion {
+  if (![[NSFileManager defaultManager] fileExistsAtPath:localFilePath]) {
+    completion(@[@"file-not-found", @"The local file specified does not exist on the device."], nil);
+  } else {
+    dispatch_async(dispatch_get_main_queue(), ^{
+      completion(nil, [RCTConvert UIImage:localFilePath]);
+    });
+  }
+}
+
+@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h
rename to packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
index 4298dfe84c..eb9a79bb87 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.h
@@ -19,6 +19,6 @@
 #import <Foundation/Foundation.h>
 #import <React/RCTBridgeModule.h>
 
-@interface RNFBMLVisionLandmarkRecognizerModule : NSObject <RCTBridgeModule>
+@interface RNFBMLDocumentTextRecognizerModule : NSObject <RCTBridgeModule>
 
 @end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
similarity index 88%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
index 82f4f7ccf1..23d4758704 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionDocumentTextRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLDocumentTextRecognizerModule.m
@@ -18,17 +18,17 @@
 #import <Firebase/Firebase.h>
 #import <RNFBApp/RNFBSharedUtils.h>
 
-#import "RNFBMLVisionDocumentTextRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLDocumentTextRecognizerModule.h"
+#import "RNFBMLCommon.h"
 
-@implementation RNFBMLVisionDocumentTextRecognizerModule
+@implementation RNFBMLDocumentTextRecognizerModule
 #pragma mark -
 #pragma mark Module Setup
 
 RCT_EXPORT_MODULE();
 
 #pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
+#pragma mark Firebase ML Methods
 
 RCT_EXPORT_METHOD(cloudDocumentTextRecognizerProcessImage:
     (FIRApp *) firebaseApp
@@ -37,7 +37,7 @@ @implementation RNFBMLVisionDocumentTextRecognizerModule
     : (RCTPromiseResolveBlock)resolve
     : (RCTPromiseRejectBlock)reject
 ) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+  [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
     if (errorCodeMessageArray != nil) {
       [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
           @"code": errorCodeMessageArray[0],
@@ -79,19 +79,19 @@ @implementation RNFBMLVisionDocumentTextRecognizerModule
 
       resolve(@{
           @"text": result.text,
-          @"blocks": [self getVisionDocumentTextBlocksList:result.blocks],
+          @"blocks": [self getMLDocumentTextBlocksList:result.blocks],
       });
     }];
   }];
 }
 
-- (NSArray *)getVisionDocumentTextBlocksList:(NSArray *)blocks {
+- (NSArray *)getMLDocumentTextBlocksList:(NSArray *)blocks {
   NSMutableArray *documentTextBlocksFormattedList = [[NSMutableArray alloc] init];
 
   for (FIRVisionDocumentTextBlock *block in blocks) {
     NSMutableDictionary *documentTextBlockFormatted = [[NSMutableDictionary alloc] init];
 
-    documentTextBlockFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:block.frame];
+    documentTextBlockFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:block.frame];
     documentTextBlockFormatted[@"text"] = block.text;
     documentTextBlockFormatted[@"confidence"] = block.confidence;
     documentTextBlockFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:block.recognizedLanguages];
@@ -130,7 +130,7 @@ - (NSArray *)getParagraphsList:(NSArray *)par
   for (FIRVisionDocumentTextParagraph *paragraph in paragraphs) {
     NSMutableDictionary *paragraphFormatted = [[NSMutableDictionary alloc] init];
 
-    paragraphFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:paragraph.frame];
+    paragraphFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:paragraph.frame];
     paragraphFormatted[@"text"] = paragraph.text;
     paragraphFormatted[@"confidence"] = paragraph.confidence;
     paragraphFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:paragraph.recognizedLanguages];
@@ -149,7 +149,7 @@ - (NSArray *)getWordsList:(NSArray *)words {
   for (FIRVisionDocumentTextWord *word in words) {
     NSMutableDictionary *wordFormatted = [[NSMutableDictionary alloc] init];
 
-    wordFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:word.frame];
+    wordFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:word.frame];
     wordFormatted[@"text"] = word.text;
     wordFormatted[@"confidence"] = word.confidence;
     wordFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:word.recognizedLanguages];
@@ -168,7 +168,7 @@ - (NSArray *)getSymbolList:(NSArray *)symbols {
   for (FIRVisionDocumentTextSymbol *symbol in symbols) {
     NSMutableDictionary *symbolFormatted = [[NSMutableDictionary alloc] init];
 
-    symbolFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:symbol.frame];
+    symbolFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:symbol.frame];
     symbolFormatted[@"text"] = symbol.text;
     symbolFormatted[@"confidence"] = symbol.confidence;
     symbolFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:symbol.recognizedLanguages];
diff --git a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
similarity index 91%
rename from packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h
rename to packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
index 905bdad552..6a85d99084 100644
--- a/packages/ml-natural-language/ios/RNFBMLNaturalLanguage/RNFBMLNaturalLanguageIdModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.h
@@ -19,5 +19,6 @@
 #import <Foundation/Foundation.h>
 #import <React/RCTBridgeModule.h>
 
-@interface RNFBMLNaturalLanguageIdModule : NSObject <RCTBridgeModule>
+@interface RNFBMLImageLabelerModule : NSObject <RCTBridgeModule>
+
 @end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
similarity index 60%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
index 8d95fba257..663fc5c36a 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLImageLabelerModule.m
@@ -17,62 +17,17 @@
 #import <Firebase/Firebase.h>
 #import <RNFBApp/RNFBSharedUtils.h>
 
-#import "RNFBMLVisionImageLabelerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLImageLabelerModule.h"
+#import "RNFBMLCommon.h"
 
-@implementation RNFBMLVisionImageLabelerModule
+@implementation RNFBMLImageLabelerModule
 #pragma mark -
 #pragma mark Module Setup
 
 RCT_EXPORT_MODULE();
 
 #pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(imageLabelerProcessImage:
-    (FIRApp *) firebaseApp
-    : (NSString *)filePath
-    : (NSDictionary *)imageLabelerOptions
-    : (RCTPromiseResolveBlock)resolve
-    : (RCTPromiseRejectBlock)reject
-) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
-    if (errorCodeMessageArray != nil) {
-      [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
-          @"code": errorCodeMessageArray[0],
-          @"message": errorCodeMessageArray[1],
-      }];
-      return;
-    }
-
-    FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
-    FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
-    FIRVisionOnDeviceImageLabelerOptions *options = [[FIRVisionOnDeviceImageLabelerOptions alloc] init];
-
-    if (imageLabelerOptions[@"confidenceThreshold"]) {
-      options.confidenceThreshold = [imageLabelerOptions[@"confidenceThreshold"] floatValue];
-    }
-
-    FIRVisionImageLabeler *labeler = [vision onDeviceImageLabelerWithOptions:options];
-    [labeler processImage:visionImage completion:^(NSArray *_Nullable labels, NSError *error) {
-      if (error != nil) {
-        [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
-            @"code": @"unknown",
-            @"message": [error localizedDescription],
-        }];
-        return;
-      }
-
-      if (labels == nil) {
-        resolve(@[]);
-        return;
-      }
-
-      resolve([self getLabelList:labels]);
-    }];
-  }];
-}
+#pragma mark Firebase ML Methods
 
 RCT_EXPORT_METHOD(cloudImageLabelerProcessImage:
     (FIRApp *) firebaseApp
@@ -81,7 +36,7 @@ @implementation RNFBMLVisionImageLabelerModule
     : (RCTPromiseResolveBlock)resolve
     : (RCTPromiseRejectBlock)reject
 ) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+  [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
     if (errorCodeMessageArray != nil) {
       [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
           @"code": errorCodeMessageArray[0],
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
similarity index 92%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h
rename to packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
index b7624d7829..cb84682e9d 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionFaceDetectorModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.h
@@ -19,6 +19,6 @@
 #import <Foundation/Foundation.h>
 #import <React/RCTBridgeModule.h>
 
-@interface RNFBMLVisionFaceDetectorModule : NSObject <RCTBridgeModule>
+@interface RNFBMLLandmarkRecognizerModule : NSObject <RCTBridgeModule>
 
 @end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
index f086ae9c0e..e2ea37a829 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionLandmarkRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLLandmarkRecognizerModule.m
@@ -17,17 +17,17 @@
 #import <Firebase/Firebase.h>
 #import <RNFBApp/RNFBSharedUtils.h>
 
-#import "RNFBMLVisionLandmarkRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLLandmarkRecognizerModule.h"
+#import "RNFBMLCommon.h"
 
-@implementation RNFBMLVisionLandmarkRecognizerModule
+@implementation RNFBMLLandmarkRecognizerModule
 #pragma mark -
 #pragma mark Module Setup
 
 RCT_EXPORT_MODULE();
 
 #pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
+#pragma mark Firebase ML Methods
 
 RCT_EXPORT_METHOD(cloudLandmarkRecognizerProcessImage:
     (FIRApp *) firebaseApp
@@ -36,7 +36,7 @@ @implementation RNFBMLVisionLandmarkRecognizerModule
     : (RCTPromiseResolveBlock)resolve
     : (RCTPromiseRejectBlock)reject
 ) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+  [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
     if (errorCodeMessageArray != nil) {
       [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
           @"code": errorCodeMessageArray[0],
@@ -81,7 +81,7 @@ @implementation RNFBMLVisionLandmarkRecognizerModule
       visionLandmark[@"confidence"] = landmark.confidence;
       visionLandmark[@"entityId"] = landmark.entityId;
       visionLandmark[@"landmark"] = landmark.landmark;
-      visionLandmark[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:landmark.frame];
+      visionLandmark[@"boundingBox"] = [RNFBMLCommon rectToIntArray:landmark.frame];
 
       NSMutableArray *locations = [[NSMutableArray alloc] init];
       for (FIRVisionLatitudeLongitude *location in landmark.locations) {
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
similarity index 90%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h
rename to packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
index b7ed0366b0..251401caca 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionImageLabelerModule.h
+++ b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.h
@@ -19,6 +19,6 @@
 #import <Foundation/Foundation.h>
 #import <React/RCTBridgeModule.h>
 
-@interface RNFBMLVisionImageLabelerModule : NSObject <RCTBridgeModule>
+@interface RNFBMLTextRecognizerModule : NSObject <RCTBridgeModule>
 
-@end
\ No newline at end of file
+@end
diff --git a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
similarity index 65%
rename from packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m
rename to packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
index ae79f74bdb..53f4cf1938 100644
--- a/packages/ml-vision/ios/RNFBMLVision/RNFBMLVisionTextRecognizerModule.m
+++ b/packages/ml/ios/RNFBML/RNFBMLTextRecognizerModule.m
@@ -17,51 +17,17 @@
 #import <Firebase/Firebase.h>
 #import <RNFBApp/RNFBSharedUtils.h>
 
-#import "RNFBMLVisionTextRecognizerModule.h"
-#import "RNFBMLVisionCommon.h"
+#import "RNFBMLTextRecognizerModule.h"
+#import "RNFBMLCommon.h"
 
-@implementation RNFBMLVisionTextRecognizerModule
+@implementation RNFBMLTextRecognizerModule
 #pragma mark -
 #pragma mark Module Setup
 
 RCT_EXPORT_MODULE();
 
 #pragma mark -
-#pragma mark Firebase ML Kit Vision Methods
-
-RCT_EXPORT_METHOD(textRecognizerProcessImage:
-    (FIRApp *) firebaseApp
-    : (NSString *)filePath
-    : (RCTPromiseResolveBlock)resolve
-    : (RCTPromiseRejectBlock)reject
-) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
-    if (errorCodeMessageArray != nil) {
-      [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
-          @"code": errorCodeMessageArray[0],
-          @"message": errorCodeMessageArray[1],
-      }];
-      return;
-    }
-
-    FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithImage:image];
-    FIRVision *vision = [FIRVision visionForApp:firebaseApp];
-
-    FIRVisionTextRecognizer *textRecognizer = [vision onDeviceTextRecognizer];
-
-    [textRecognizer processImage:visionImage completion:^(FIRVisionText *text, NSError *error) {
-      if (error != nil) {
-        [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
-            @"code": @"unknown",
-            @"message": [error localizedDescription],
-        }];
-        return;
-      }
-
-      resolve([self getFirebaseVisionTextMap:text]);
-    }];
-  }];
-}
+#pragma mark Firebase ML Methods
 
 RCT_EXPORT_METHOD(cloudTextRecognizerProcessImage:
     (FIRApp *) firebaseApp
@@ -70,7 +36,7 @@ @implementation RNFBMLVisionTextRecognizerModule
     : (RCTPromiseResolveBlock)resolve
     : (RCTPromiseRejectBlock)reject
 ) {
-  [RNFBMLVisionCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
+  [RNFBMLCommon UIImageForFilePath:filePath completion:^(NSArray *errorCodeMessageArray, UIImage *image) {
     if (errorCodeMessageArray != nil) {
       [RNFBSharedUtils rejectPromiseWithUserInfo:reject userInfo:(NSMutableDictionary *) @{
           @"code": errorCodeMessageArray[0],
@@ -110,21 +76,21 @@ @implementation RNFBMLVisionTextRecognizerModule
         return;
       }
 
-      resolve([self getFirebaseVisionTextMap:text]);
+      resolve([self getFirebaseMLTextMap:text]);
     }];
   }];
 }
 
-- (NSDictionary *)getFirebaseVisionTextMap:(FIRVisionText *)text {
-  NSMutableDictionary *firebaseVisionTextMap = [[NSMutableDictionary alloc] init];
+- (NSDictionary *)getFirebaseMLTextMap:(FIRVisionText *)text {
+  NSMutableDictionary *firebaseMLTextMap = [[NSMutableDictionary alloc] init];
 
-  firebaseVisionTextMap[@"text"] = text.text;
-  firebaseVisionTextMap[@"blocks"] = [self getVisionTextBlocksList:text.blocks];
+  firebaseMLTextMap[@"text"] = text.text;
+  firebaseMLTextMap[@"blocks"] = [self getMLTextBlocksList:text.blocks];
 
-  return firebaseVisionTextMap;
+  return firebaseMLTextMap;
 }
 
-- (NSArray *)getVisionTextBlocksList:(NSArray *)blocks {
+- (NSArray *)getMLTextBlocksList:(NSArray *)blocks {
   NSMutableArray *blockListFormatted = [[NSMutableArray alloc] init];
 
   for (FIRVisionTextBlock *block in blocks) {
@@ -132,9 +98,9 @@ - (NSArray *)getVisionTextBlocksList:(NSArray *)blocks {
 
     textBlockFormatted[@"text"] = block.text;
     textBlockFormatted[@"confidence"] = block.confidence;
-    textBlockFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:block.frame];
+    textBlockFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:block.frame];
     textBlockFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:block.recognizedLanguages];
-    textBlockFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:block.cornerPoints];
+    textBlockFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:block.cornerPoints];
     textBlockFormatted[@"lines"] = [self getLinesList:block.lines];
 
     [blockListFormatted addObject:textBlockFormatted];
@@ -149,11 +115,11 @@ - (NSArray *)getLinesList:(NSArray *)lines {
   for (FIRVisionTextLine *line in lines) {
     NSMutableDictionary *lineFormatted = [[NSMutableDictionary alloc] init];
 
-    lineFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:line.frame];
+    lineFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:line.frame];
     lineFormatted[@"text"] = line.text;
     lineFormatted[@"confidence"] = line.confidence;
     lineFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:line.recognizedLanguages];
-    lineFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:line.cornerPoints];
+    lineFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:line.cornerPoints];
     lineFormatted[@"elements"] = [self getElementsList:line.elements];
 
     [lineListFormatted addObject:lineFormatted];
@@ -168,11 +134,11 @@ - (NSArray *)getElementsList:(NSArray *)elements {
   for (FIRVisionTextElement *element in elements) {
     NSMutableDictionary *elementFormatted = [[NSMutableDictionary alloc] init];
 
-    elementFormatted[@"boundingBox"] = [RNFBMLVisionCommon rectToIntArray:element.frame];
+    elementFormatted[@"boundingBox"] = [RNFBMLCommon rectToIntArray:element.frame];
     elementFormatted[@"text"] = element.text;
     elementFormatted[@"confidence"] = element.confidence;
     elementFormatted[@"recognizedLanguages"] = [self getLanguageCodesList:element.recognizedLanguages];
-    elementFormatted[@"cornerPoints"] = [RNFBMLVisionCommon visionPointsToArray:element.cornerPoints];
+    elementFormatted[@"cornerPoints"] = [RNFBMLCommon pointsToArray:element.cornerPoints];
 
     [elementsListFormatted addObject:elementFormatted];
   }
diff --git a/packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js b/packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
similarity index 95%
rename from packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js
rename to packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
index 73a1a20ed4..7263a1e38a 100644
--- a/packages/ml-vision/lib/visionCloudDocumentTextRecognizerOptions.js
+++ b/packages/ml/lib/MLCloudDocumentTextRecognizerOptions.js
@@ -24,9 +24,7 @@ import {
   isUndefined,
 } from '@react-native-firebase/app/lib/common';
 
-export default function visionCloudDocumentTextRecognizerOptions(
-  cloudDocumentTextRecognizerOptions,
-) {
+export default function MLCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions) {
   const out = {
     enforceCertFingerprintMatch: false,
   };
diff --git a/packages/ml-vision/lib/visionCloudImageLabelerOptions.js b/packages/ml/lib/MLCloudImageLabelerOptions.js
similarity index 96%
rename from packages/ml-vision/lib/visionCloudImageLabelerOptions.js
rename to packages/ml/lib/MLCloudImageLabelerOptions.js
index 6900225242..6339968454 100644
--- a/packages/ml-vision/lib/visionCloudImageLabelerOptions.js
+++ b/packages/ml/lib/MLCloudImageLabelerOptions.js
@@ -24,7 +24,7 @@ import {
   isUndefined,
 } from '@react-native-firebase/app/lib/common';
 
-export default function visionCloudImageLabelerOptions(cloudImageLabelerOptions) {
+export default function MLCloudImageLabelerOptions(cloudImageLabelerOptions) {
   const out = {
     enforceCertFingerprintMatch: false,
     confidenceThreshold: 0.5,
diff --git a/packages/ml-vision/lib/VisionCloudLandmarkRecognizerModelType.js b/packages/ml/lib/MLCloudLandmarkRecognizerModelType.js
similarity index 100%
rename from packages/ml-vision/lib/VisionCloudLandmarkRecognizerModelType.js
rename to packages/ml/lib/MLCloudLandmarkRecognizerModelType.js
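The option builders in this section normalise user input and apply defaults: `enforceCertFingerprintMatch: false` everywhere, `confidenceThreshold: 0.5` for the labeler, and `maxResults: 10` plus the stable model for the landmark recogniser below. A hedged usage sketch of the landmark options exercised by the e2e tests earlier in the diff (the helper name is illustrative):

```js
import { firebase } from '@react-native-firebase/ml';

async function findLandmarks(localImageFilePath) {
  return firebase.ml().cloudLandmarkRecognizerProcessImage(localImageFilePath, {
    maxResults: 3, // caps the result count; defaults to 10
    modelType: firebase.ml.MLCloudLandmarkRecognizerModelType.STABLE_MODEL, // the default
  });
}
```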
a/packages/ml-vision/lib/visionCloudLandmarkRecognizerOptions.js +++ b/packages/ml/lib/MLCloudLandmarkRecognizerOptions.js @@ -23,13 +23,13 @@ import { isString, isUndefined, } from '@react-native-firebase/app/lib/common'; -import VisionCloudLandmarkRecognizerModelType from './VisionCloudLandmarkRecognizerModelType'; +import MLCloudLandmarkRecognizerModelType from './MLCloudLandmarkRecognizerModelType'; -export default function visionCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions) { +export default function MLCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions) { const out = { enforceCertFingerprintMatch: false, maxResults: 10, - model: VisionCloudLandmarkRecognizerModelType.STABLE_MODEL, + model: MLCloudLandmarkRecognizerModelType.STABLE_MODEL, }; if (isUndefined(cloudLandmarkRecognizerOptions)) { @@ -69,12 +69,11 @@ export default function visionCloudLandmarkRecognizerOptions(cloudLandmarkRecogn if (cloudLandmarkRecognizerOptions.modelType) { if ( cloudLandmarkRecognizerOptions.modelType !== - VisionCloudLandmarkRecognizerModelType.STABLE_MODEL && - cloudLandmarkRecognizerOptions.modelType !== - VisionCloudLandmarkRecognizerModelType.LATEST_MODEL + MLCloudLandmarkRecognizerModelType.STABLE_MODEL && + cloudLandmarkRecognizerOptions.modelType !== MLCloudLandmarkRecognizerModelType.LATEST_MODEL ) { throw new Error( - "'cloudLandmarkRecognizerOptions.modelType' invalid model. Expected VisionCloudLandmarkRecognizerModelType.STABLE_MODEL or VisionCloudLandmarkRecognizerModelType.LATEST_MODEL.", + "'cloudLandmarkRecognizerOptions.modelType' invalid model. Expected MLCloudLandmarkRecognizerModelType.STABLE_MODEL or MLCloudLandmarkRecognizerModelType.LATEST_MODEL.", ); } diff --git a/packages/ml-vision/lib/VisionCloudTextRecognizerModelType.js b/packages/ml/lib/MLCloudTextRecognizerModelType.js similarity index 100% rename from packages/ml-vision/lib/VisionCloudTextRecognizerModelType.js rename to packages/ml/lib/MLCloudTextRecognizerModelType.js diff --git a/packages/ml-vision/lib/visionCloudTextRecognizerOptions.js b/packages/ml/lib/MLCloudTextRecognizerOptions.js similarity index 85% rename from packages/ml-vision/lib/visionCloudTextRecognizerOptions.js rename to packages/ml/lib/MLCloudTextRecognizerOptions.js index 2d013f0b03..8dcc45eee2 100644 --- a/packages/ml-vision/lib/visionCloudTextRecognizerOptions.js +++ b/packages/ml/lib/MLCloudTextRecognizerOptions.js @@ -23,12 +23,12 @@ import { isString, isUndefined, } from '@react-native-firebase/app/lib/common'; -import VisionCloudTextRecognizerModelType from './VisionCloudTextRecognizerModelType'; +import MLCloudTextRecognizerModelType from './MLCloudTextRecognizerModelType'; -export default function visionCloudTextRecognizerOptions(cloudTextRecognizerOptions) { +export default function MLCloudTextRecognizerOptions(cloudTextRecognizerOptions) { const out = { enforceCertFingerprintMatch: false, - modelType: VisionCloudTextRecognizerModelType.SPARSE_MODEL, + modelType: MLCloudTextRecognizerModelType.SPARSE_MODEL, }; if (isUndefined(cloudTextRecognizerOptions)) { @@ -59,8 +59,8 @@ export default function visionCloudTextRecognizerOptions(cloudTextRecognizerOpti if (cloudTextRecognizerOptions.modelType) { if ( - cloudTextRecognizerOptions.modelType !== VisionCloudTextRecognizerModelType.DENSE_MODEL && - cloudTextRecognizerOptions.modelType !== VisionCloudTextRecognizerModelType.SPARSE_MODEL + cloudTextRecognizerOptions.modelType !== MLCloudTextRecognizerModelType.DENSE_MODEL && + cloudTextRecognizerOptions.modelType !== 
MLCloudTextRecognizerModelType.SPARSE_MODEL ) { throw new Error( - "'cloudTextRecognizerOptions.modelType' invalid model. Expected VisionCloudTextRecognizerModelType.DENSE_MODEL or VisionCloudTextRecognizerModelType.SPARSE_MODEL.", + "'cloudTextRecognizerOptions.modelType' invalid model. Expected MLCloudTextRecognizerModelType.DENSE_MODEL or MLCloudTextRecognizerModelType.SPARSE_MODEL.", diff --git a/packages/ml-vision/lib/VisionDocumentTextRecognizedBreakType.js b/packages/ml/lib/MLDocumentTextRecognizedBreakType.js similarity index 100% rename from packages/ml-vision/lib/VisionDocumentTextRecognizedBreakType.js rename to packages/ml/lib/MLDocumentTextRecognizedBreakType.js diff --git a/packages/ml/lib/index.d.ts b/packages/ml/lib/index.d.ts new file mode 100644 index 0000000000..9012b33ca6 --- /dev/null +++ b/packages/ml/lib/index.d.ts @@ -0,0 +1,701 @@ +/* + * Copyright (c) 2016-present Invertase Limited & Contributors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this library except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { ReactNativeFirebase } from '@react-native-firebase/app'; +/** + * Firebase ML package for React Native. + * + * #### Example 1 + * + * Access the firebase export from the `ml` package: + * + * ```js + * import { firebase } from '@react-native-firebase/ml'; + * + * // firebase.ml().X + * ``` + * + * #### Example 2 + * + * Using the default export from the `ml` package: + * + * ```js + * import ml from '@react-native-firebase/ml'; + * + * // ml().X + * ``` + * + * #### Example 3 + * + * Using the default export from the `app` package: + * + * ```js + * import firebase from '@react-native-firebase/app'; + * import '@react-native-firebase/ml'; + * + * // firebase.ml().X + * ``` + * + * @firebase ml + */ +export namespace FirebaseMLTypes { + import FirebaseModule = ReactNativeFirebase.FirebaseModule; + + export interface Statics { + MLCloudTextRecognizerModelType: typeof MLCloudTextRecognizerModelType; + MLCloudLandmarkRecognizerModelType: typeof MLCloudLandmarkRecognizerModelType; + MLDocumentTextRecognizedBreakType: typeof MLDocumentTextRecognizedBreakType; + } + + /** + * Options for the cloud image labeler. A confidence threshold can be provided for label detection. + * + * For example, if the confidence threshold is set to 0.7, only labels with confidence >= 0.7 would be returned. The default threshold is 0.5. + * + * Note: at most 20 labels will be returned by the cloud image labeler. + */ + export interface MLCloudImageLabelerOptions { + /** + * Only allow registered application instances with matching certificate fingerprint to use ML API. + * + * > Do not set this for debug build if you use simulators to test. + * + * #### Example + * + * ```js + * await firebase.ml().cloudImageLabelerProcessImage(filePath, { + * enforceCertFingerprintMatch: true, + * }); + * ``` + */ + enforceCertFingerprintMatch?: boolean; + + /** + * Sets confidence threshold in the range of [0.0 - 1.0] of detected labels. Only labels detected with confidence higher than this threshold are returned. + * + * Defaults to 0.5.
+ * + * #### Example + * + * ```js + * await firebase.ml().cloudImageLabelerProcessImage(filePath, { + * confidenceThreshold: 0.8, + * }); + * ``` + */ + confidenceThreshold?: number; + + /** + * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used. + * + * #### Example + * + * ```js + * await firebase.ml().cloudImageLabelerProcessImage(filePath, { + * apiKeyOverride: 'xyz123', + * }); + * ``` + * + * @ios + */ + apiKeyOverride?: string; + } + + /** + * Detector for finding popular natural and man-made structures within an image. + */ + export interface MLCloudLandmarkRecognizerOptions { + /** + * Only allow registered application instances with matching certificate fingerprint to use ML API. + * + * > Do not set this for debug build if you use simulators to test. + */ + enforceCertFingerprintMatch?: boolean; + + /** + * Sets the maximum number of results of this type. + * + * Defaults to 10. + */ + maxResults?: number; + + /** + * Sets model type for the detection. + * + * Defaults to `MLCloudLandmarkRecognizerModelType.STABLE_MODEL`. + */ + modelType?: + | MLCloudLandmarkRecognizerModelType.STABLE_MODEL + | MLCloudLandmarkRecognizerModelType.LATEST_MODEL; + + /** + * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used. + * + * @ios + */ + apiKeyOverride?: string; + } + + /** + * Model types for cloud landmark recognition. + */ + export enum MLCloudLandmarkRecognizerModelType { + /** + * Stable model would be used. + */ + STABLE_MODEL = 1, + + /** + * Latest model would be used. + */ + LATEST_MODEL = 2, + } + + /** + * Options for cloud text recognizer. + */ + export interface MLCloudTextRecognizerOptions { + /** + * Only allow registered application instances with matching certificate fingerprint to use ML API. + * + * > Do not set this for debug build if you use simulators to test. + * + * #### Example + * + * ```js + * await firebase.ml().cloudTextRecognizerProcessImage(filePath, { + * enforceCertFingerprintMatch: true, + * }); + * ``` + */ + enforceCertFingerprintMatch?: boolean; + + /** + * Sets model type for cloud text recognition. The two models SPARSE_MODEL and DENSE_MODEL handle different text densities in an image. + * + * See `MLCloudTextRecognizerModelType` for types. + * + * Defaults to `MLCloudTextRecognizerModelType.SPARSE_MODEL`. + * + * #### Example + * + * ```js + * import { + * firebase, + * MLCloudTextRecognizerModelType, + * } from '@react-native-firebase/ml'; + * + * await firebase.ml().cloudTextRecognizerProcessImage(filePath, { + * modelType: MLCloudTextRecognizerModelType.DENSE_MODEL, + * }); + * ``` + */ + modelType?: + | MLCloudTextRecognizerModelType.SPARSE_MODEL + | MLCloudTextRecognizerModelType.DENSE_MODEL; + + /** + * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language + * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when + * the language of the text in the image is known, setting a hint will help get better results (although it will be a + * significant hindrance if the hint is wrong). + * + * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information. 
+ * + * #### Example + * + * ```js + * await firebase.ml().cloudTextRecognizerProcessImage(filePath, { + * languageHints: ['fr', 'de'], + * }); + * ``` + */ + languageHints?: string[]; + + /** + * API key to use for Cloud ML API. If not set, the default API key from `firebase.app()` will be used. + * + * #### Example + * + * ```js + * await firebase.ml().cloudTextRecognizerProcessImage(filePath, { + * apiKeyOverride: 'xyz123', + * }); + * ``` + * + * @ios + */ + apiKeyOverride?: string; + } + + /** + * Options for the cloud document text recognizer. + */ + export interface MLCloudDocumentTextRecognizerOptions { + /** + * Only allow registered application instances with matching certificate fingerprint to use ML API. + * + * > Do not set this for debug build if you use simulators to test. + * + * #### Example + * + * ```js + * await firebase.ml().cloudDocumentTextRecognizerProcessImage(filePath, { + * enforceCertFingerprintMatch: true, + * }); + * ``` + */ + enforceCertFingerprintMatch?: boolean; + + /** + * Sets language hints. In most cases, not setting this yields the best results since it enables automatic language + * detection. For languages based on the Latin alphabet, setting language hints is not needed. In rare cases, when + * the language of the text in the image is known, setting a hint will help get better results (although it will be a + * significant hindrance if the hint is wrong). + * + * Each language code must be a BCP-47 identifier. See [Google Cloud OCR Language Support](https://cloud.google.com/vision/docs/languages) for more information. + * + * #### Example + * + * ```js + * await firebase.ml().cloudDocumentTextRecognizerProcessImage(filePath, { + * languageHints: ['fr', 'de'], + * }); + * ``` + */ + languageHints?: string[]; + + /** + * API key to use for ML API. If not set, the default API key from `firebase.app()` will be used. + * + * #### Example + * + * ```js + * await firebase.ml().cloudDocumentTextRecognizerProcessImage(filePath, { + * apiKeyOverride: 'xyz123', + * }); + * ``` + * + * @ios + */ + apiKeyOverride?: string; + } + + /** + * The cloud model type used in MLCloudTextRecognizerOptions & MLCloudDocumentTextRecognizerOptions. + * + * Defaults to `SPARSE_MODEL` + */ + export enum MLCloudTextRecognizerModelType { + /** + * Sparse model type. It is more suitable for sparse text. + */ + SPARSE_MODEL = 1, + /** + * Dense model type. It is more suitable for well-formatted dense text. + */ + DENSE_MODEL = 2, + } + + /** + * A Rectangle holds four number coordinates relative to the processed image. + * Rectangles are represented as [left, top, right, bottom]. + * + * Used by ML Text Recognizer & Landmark Recognition APIs. + */ + export type MLRectangle = [number, number, number, number]; + + /** + * A point holds two number coordinates relative to the processed image. + * Points are represented as [x, y]. + * + * Used by ML Text Recognizer & Landmark Recognition APIs. + */ + export type MLPoint = [number, number]; + + /** + * A hierarchical representation of texts recognized in an image. + */ + export interface MLText { + /** + * Retrieve the recognized text as a string. + */ + text: string; + + /** + * Gets an array of `MLTextBlock`s, each a block of text that can be further decomposed into an array of `MLTextLine`s. + */ + blocks: MLTextBlock[]; + } + + /** + * Represents a block of text. + */ + export interface MLDocumentTextBlock extends MLDocumentTextBase { + /** + * Gets an Array of `MLDocumentTextParagraph`s that make up this block.
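To make the recognizer model types and geometry types above concrete, here is a minimal, hypothetical usage sketch. It is illustrative only; it assumes a local `filePath` string and uses only the API surface declared in this file:

```js
import ml, { MLCloudTextRecognizerModelType } from '@react-native-firebase/ml';

async function recognizeDenseText(filePath) {
  // DENSE_MODEL suits well-formatted dense text such as scanned documents;
  // the default SPARSE_MODEL suits sparse text in photos.
  const result = await ml().cloudTextRecognizerProcessImage(filePath, {
    modelType: MLCloudTextRecognizerModelType.DENSE_MODEL,
  });

  result.blocks.forEach(block => {
    // An MLRectangle is [left, top, right, bottom] in image coordinates.
    const [left, top, right, bottom] = block.boundingBox;
    console.log(block.text, { left, top, right, bottom });
  });
}
```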
+ */ + paragraphs: MLDocumentTextParagraph[]; + } + + /** + * A structural unit of text representing a number of words in a certain order. + */ + export interface MLDocumentTextParagraph extends MLDocumentTextBase { + /** + * Gets an Array of `MLDocumentTextWord`s that make up this paragraph. + * + * Returns an empty list if no words are found. + */ + words: MLDocumentTextWord[]; + } + + /** + * A single word representation. + */ + export interface MLDocumentTextWord extends MLDocumentTextBase { + /** + * Gets an Array of `MLDocumentTextSymbol`s that make up this word. + * The order of the symbols follows the natural reading order. + */ + symbols: MLDocumentTextSymbol[]; + } + + /** + * A single symbol representation. + */ + export type MLDocumentTextSymbol = MLDocumentTextBase; + + /** + * Enum representing the detected break type. + */ + export enum MLDocumentTextRecognizedBreakType { + /** + * Line-wrapping break. + */ + EOL_SURE_SPACE = 3, + + /** + * End-line hyphen that is not present in text; does not co-occur with `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. + */ + HYPHEN = 4, + + /** + * Line break that ends a paragraph. + */ + LINE_BREAK = 5, + + /** + * Regular space. + */ + SPACE = 1, + + /** + * Sure space (very wide). + */ + SURE_SPACE = 2, + + /** + * Unknown break label type. + */ + UNKNOWN = 0, + } + + /** + * A recognized break is the detected start or end of a structural component. + */ + export interface MLDocumentTextRecognizedBreak { + /** + * Gets the detected break type. + */ + breakType: MLDocumentTextRecognizedBreakType; + + /** + * Returns true if the break prepends an element. + */ + isPrefix: boolean; + } + /** + * A shared type that all MLDocumentText components inherit from. + */ + export interface MLDocumentTextBase { + /** + * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `MLTextBlock`, and left-to-right within a `MLTextLine`. + */ + text: string; + + /** + * The confidence of the recognized text. A valid confidence is only returned by cloud recognizers. For on-device text recognition, the confidence is always null. + */ + confidence: null | number; + + /** + * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array) + * + * A language is the BCP-47 language code, such as "en-US" or "sr-Latn". + */ + recognizedLanguages: string[]; + + /** + * Returns the bounding rectangle of the detected text. + */ + boundingBox: MLRectangle; + + /** + * Gets the recognized break - the detected start or end of a structural component. + */ + recognizedBreak: MLDocumentTextRecognizedBreak; + } + + /** + * A hierarchical representation of document text recognized in an image. + */ + export interface MLDocumentText { + /** + * Retrieve the recognized text as a string. + */ + text: string; + + /** + * Gets an array of `MLDocumentTextBlock`s, each a block of text that can be further decomposed into an array of `MLDocumentTextParagraph`s. + */ + blocks: MLDocumentTextBlock[]; + } + + /** + * A shared type that all ML Text components inherit from. + */ + export interface MLTextBase { + /** + * Gets the recognized text as a string. Returned in reading order for the language. For Latin, this is top to bottom within a `MLTextBlock`, and left-to-right within a `MLTextLine`. + */ + text: string; + + /** + * The confidence of the recognized text. A valid confidence is only returned by cloud recognizers. For on-device text recognition, the confidence is always null.
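The document-text interfaces above form a strict hierarchy: an `MLDocumentText` decomposes into blocks, paragraphs, words, and finally symbols, all sharing `MLDocumentTextBase`. A hedged sketch of walking that hierarchy, assuming a local `filePath` to an image on the device:

```js
import ml from '@react-native-firebase/ml';

async function dumpDocumentText(filePath) {
  const document = await ml().cloudDocumentTextRecognizerProcessImage(filePath);

  for (const block of document.blocks) {
    for (const paragraph of block.paragraphs) {
      for (const word of paragraph.words) {
        // Symbols follow the natural reading order of the word.
        const fromSymbols = word.symbols.map(symbol => symbol.text).join('');
        console.log(word.text, word.confidence, fromSymbols);
      }
    }
  }
}
```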
+ */ + confidence: null | number; + + /** + * Gets a list of recognized languages. (Cloud API only. On-Device returns empty array) + * + * A language is the BCP-47 language code, such as "en-US" or "sr-Latn". + */ + recognizedLanguages: string[]; + + /** + * Returns the bounding rectangle of the detected text. + */ + boundingBox: MLRectangle; + + /** + * Gets the four corner points in clockwise direction starting with top-left. Due to the possible perspective distortions, this is not necessarily a rectangle. Parts of the region could be outside of the image. + */ + cornerPoints: MLPoint[]; + } + + /** + * Represents a block of text (similar to a paragraph). + */ + export interface MLTextBlock extends MLTextBase { + /** + * Gets an Array of `MLTextLine`s that make up this text block. + */ + lines: MLTextLine[]; + } + + /** + * Represents a line of text. + */ + export interface MLTextLine extends MLTextBase { + /** + * Gets an Array of `MLTextElement`s that make up this line of text. + * + * An element is roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element. + */ + elements: MLTextElement[]; + } + + /** + * Roughly equivalent to a space-separated "word" in most Latin languages, or a character in others. For instance, if a word is split between two lines by a hyphen, each part is encoded as a separate Element. + */ + export type MLTextElement = MLTextBase; + + /** + * Represents an image label returned from `cloudImageLabelerProcessImage()`. + */ + export interface MLImageLabel { + /** + * Returns a detected label from the given image. The label returned here is in English only. + * + * Use `entityId` to retrieve a unique id. + */ + text: string; + + /** + * Returns an opaque entity ID. IDs are available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/). + */ + entityId: string; + + /** + * Gets overall confidence of the result. + * + * Ranges between 0 (low confidence) and 1 (high confidence). + */ + confidence: number; + } + + /** + * Represents a detected landmark returned from `cloudLandmarkRecognizerProcessImage()`. + */ + export interface MLLandmark { + /** + * Gets the image region of the detected landmark. Returns null if nothing was detected. + */ + boundingBox: MLRectangle | null; + + /** + * Gets overall confidence of the result, ranging between 0 & 1. + */ + confidence: number; + + /** + * Gets an opaque entity ID. Some IDs may be available in [Google Knowledge Graph Search API](https://developers.google.com/knowledge-graph/). + */ + entityId: string; + + /** + * Gets the detected landmark. + */ + landmark: string; + + /** + * Gets the location information for the detected entity. + * + * Multiple MLGeoPoint elements can be present because one location may indicate the location of the scene + * in the image, and another location may indicate the location of the place where the image was taken. + * Location information is usually present for landmarks. + */ + locations: MLGeoPoint[]; + } + + /** + * A representation of a latitude/longitude pair. + * + * This is expressed as an array of numbers representing degrees latitude and degrees longitude, in the form `[lat, lng]`. + */ + export type MLGeoPoint = [number, number]; + + /** + * The Firebase ML service interface. + * + * > This module is available for the default app only.
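Unlike the image labeler, the landmark recognizer has no inline example in this file, so a short hypothetical sketch may help. It assumes a local `filePath` and only the `MLLandmark` and `MLGeoPoint` shapes declared above:

```js
import ml from '@react-native-firebase/ml';

async function findLandmarks(filePath) {
  const landmarks = await ml().cloudLandmarkRecognizerProcessImage(filePath, {
    maxResults: 5, // the default is 10
  });

  for (const result of landmarks) {
    // Each entry in locations is an MLGeoPoint: [lat, lng].
    // Note the boundingBox may be null if nothing was detected.
    const [lat, lng] = result.locations[0] || [];
    console.log(result.landmark, result.confidence, { lat, lng });
  }
}
```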
+ * + * #### Example + * + * Get the ML service for the default app: + * + * ```js + * const defaultAppML = firebase.ml(); + * ``` + */ + export class Module extends FirebaseModule { + /** + * Detect text from a local image file. + * + * @param imageFilePath A local path to an image on the device. + * @param cloudTextRecognizerOptions An instance of `MLCloudTextRecognizerOptions`. + */ + cloudTextRecognizerProcessImage( + imageFilePath: string, + cloudTextRecognizerOptions?: MLCloudTextRecognizerOptions, + ): Promise<MLText>; + + /** + * Detect text within a document using a local image file. + * + * @param imageFilePath A local path to an image on the device. + * @param cloudDocumentTextRecognizerOptions An instance of `MLCloudDocumentTextRecognizerOptions`. + */ + cloudDocumentTextRecognizerProcessImage( + imageFilePath: string, + cloudDocumentTextRecognizerOptions?: MLCloudDocumentTextRecognizerOptions, + ): Promise<MLDocumentText>; + + /** + * Returns an array of landmarks (as `MLLandmark`) for a given local image file path. + * + * @param imageFilePath A local image file path. + * @param cloudLandmarkRecognizerOptions An optional instance of `MLCloudLandmarkRecognizerOptions`. + */ + cloudLandmarkRecognizerProcessImage( + imageFilePath: string, + cloudLandmarkRecognizerOptions?: MLCloudLandmarkRecognizerOptions, + ): Promise<MLLandmark[]>; + + /** + * Returns an array of labels (as `MLImageLabel`) for a given local image file path. + * + * #### Example + * + * ```js + * const labels = await firebase.ml().cloudImageLabelerProcessImage(filePath, { + * confidenceThreshold: 0.8, + * }); + * ``` + * + * @param imageFilePath A local image file path. + * @param cloudImageLabelerOptions An optional instance of `MLCloudImageLabelerOptions`. + */ + cloudImageLabelerProcessImage( + imageFilePath: string, + cloudImageLabelerOptions?: MLCloudImageLabelerOptions, + ): Promise<MLImageLabel[]>; + } +} + +declare const defaultExport: ReactNativeFirebase.FirebaseModuleWithStaticsAndApp< + FirebaseMLTypes.Module, + FirebaseMLTypes.Statics +>; + +export const firebase: ReactNativeFirebase.Module & { + ml: typeof defaultExport; + app(name?: string): ReactNativeFirebase.FirebaseApp & { ml(): FirebaseMLTypes.Module }; +}; + +export const MLCloudTextRecognizerModelType: FirebaseMLTypes.Statics['MLCloudTextRecognizerModelType']; +export const MLDocumentTextRecognizedBreakType: FirebaseMLTypes.Statics['MLDocumentTextRecognizedBreakType']; +export const MLCloudLandmarkRecognizerModelType: FirebaseMLTypes.Statics['MLCloudLandmarkRecognizerModelType']; + +export default defaultExport; + +/** + * Attach namespace to `firebase.` and `FirebaseApp.`. + */ +declare module '@react-native-firebase/app' { + namespace ReactNativeFirebase { + import FirebaseModuleWithStaticsAndApp = ReactNativeFirebase.FirebaseModuleWithStaticsAndApp; + interface Module { + ml: FirebaseModuleWithStaticsAndApp<FirebaseMLTypes.Module, FirebaseMLTypes.Statics>; + } + + interface FirebaseApp { + ml(): FirebaseMLTypes.Module; + } + } +} diff --git a/packages/ml/lib/index.js b/packages/ml/lib/index.js new file mode 100644 index 0000000000..79b25a4247 --- /dev/null +++ b/packages/ml/lib/index.js @@ -0,0 +1,145 @@ +/* + * Copyright (c) 2016-present Invertase Limited & Contributors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this library except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { isString, toFilePath } from '@react-native-firebase/app/lib/common'; +import { + createModuleNamespace, + FirebaseModule, + getFirebaseRoot, +} from '@react-native-firebase/app/lib/internal'; +import version from './version'; +import MLCloudDocumentTextRecognizerOptions from './MLCloudDocumentTextRecognizerOptions'; +import MLCloudImageLabelerOptions from './MLCloudImageLabelerOptions'; +import MLCloudLandmarkRecognizerModelType from './MLCloudLandmarkRecognizerModelType'; +import MLCloudLandmarkRecognizerOptions from './MLCloudLandmarkRecognizerOptions'; +import MLCloudTextRecognizerModelType from './MLCloudTextRecognizerModelType'; +import MLCloudTextRecognizerOptions from './MLCloudTextRecognizerOptions'; +import MLDocumentTextRecognizedBreakType from './MLDocumentTextRecognizedBreakType'; + +const statics = { + MLCloudTextRecognizerModelType, + MLCloudLandmarkRecognizerModelType, + MLDocumentTextRecognizedBreakType, +}; + +const namespace = 'ml'; +const nativeModuleName = [ + 'RNFBMLImageLabelerModule', + 'RNFBMLTextRecognizerModule', + 'RNFBMLLandmarkRecognizerModule', + 'RNFBMLDocumentTextRecognizerModule', +]; + +class FirebaseMLModule extends FirebaseModule { + cloudTextRecognizerProcessImage(localImageFilePath, cloudTextRecognizerOptions) { + if (!isString(localImageFilePath)) { + throw new Error( + "firebase.ml().cloudTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", + ); + } + + let options; + try { + options = MLCloudTextRecognizerOptions(cloudTextRecognizerOptions); + } catch (e) { + throw new Error(`firebase.ml().cloudTextRecognizerProcessImage(_, *) ${e.message}`); + } + + return this.native.cloudTextRecognizerProcessImage(toFilePath(localImageFilePath), options); + } + + cloudDocumentTextRecognizerProcessImage(localImageFilePath, cloudDocumentTextRecognizerOptions) { + if (!isString(localImageFilePath)) { + throw new Error( + "firebase.ml().cloudDocumentTextRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", + ); + } + + let options; + try { + options = MLCloudDocumentTextRecognizerOptions(cloudDocumentTextRecognizerOptions); + } catch (e) { + throw new Error(`firebase.ml().cloudDocumentTextRecognizerProcessImage(_, *) ${e.message}.`); + } + + return this.native.cloudDocumentTextRecognizerProcessImage( + toFilePath(localImageFilePath), + options, + ); + } + + cloudLandmarkRecognizerProcessImage(localImageFilePath, cloudLandmarkRecognizerOptions) { + if (!isString(localImageFilePath)) { + throw new Error( + "firebase.ml().cloudLandmarkRecognizerProcessImage(*) 'localImageFilePath' expected a string local file path.", + ); + } + + let options; + try { + options = MLCloudLandmarkRecognizerOptions(cloudLandmarkRecognizerOptions); + } catch (e) { + throw new Error(`firebase.ml().cloudLandmarkRecognizerProcessImage(_, *) ${e.message}.`); + } + + return this.native.cloudLandmarkRecognizerProcessImage(toFilePath(localImageFilePath), options); + } + + cloudImageLabelerProcessImage(localImageFilePath, cloudImageLabelerOptions) { + if (!isString(localImageFilePath)) { + throw 
new Error( + "firebase.ml().cloudImageLabelerProcessImage(*) 'localImageFilePath' expected a string local file path.", + ); + } + + let options; + try { + options = MLCloudImageLabelerOptions(cloudImageLabelerOptions); + } catch (e) { + throw new Error(`firebase.ml().cloudImageLabelerProcessImage(_, *) ${e.message}.`); + } + + return this.native.cloudImageLabelerProcessImage(toFilePath(localImageFilePath), options); + } +} + +// import { SDK_VERSION } from '@react-native-firebase/ml'; +export const SDK_VERSION = version; + +// import ML from '@react-native-firebase/ml'; +// ml().X(...); +export default createModuleNamespace({ + statics, + version, + namespace, + nativeModuleName, + nativeEvents: false, + hasMultiAppSupport: true, + hasCustomUrlOrRegionSupport: false, + ModuleClass: FirebaseMLModule, +}); + +// import ml, { firebase } from '@react-native-firebase/ml'; +// ml().X(...); +// firebase.ml().X(...); +export const firebase = getFirebaseRoot(); + +// e.g. +// // import { MLCloudTextRecognizerModelType } from '@react-native-firebase/ml'; +export { default as MLCloudTextRecognizerModelType } from './MLCloudTextRecognizerModelType'; +export { default as MLDocumentTextRecognizedBreakType } from './MLDocumentTextRecognizedBreakType'; +export { default as MLCloudLandmarkRecognizerModelType } from './MLCloudLandmarkRecognizerModelType'; diff --git a/packages/ml-vision/package.json b/packages/ml/package.json similarity index 65% rename from packages/ml-vision/package.json rename to packages/ml/package.json index d1d21234d1..4f9111ab46 100644 --- a/packages/ml-vision/package.json +++ b/packages/ml/package.json @@ -1,8 +1,8 @@ { - "name": "@react-native-firebase/ml-vision", + "name": "@react-native-firebase/ml", "version": "7.4.13", "author": "Invertase (http://invertase.io)", - "description": "React Native Firebase - Firebase ML Kit brings the power of machine learning vision to your React Native application, supporting both Android & iOS.", + "description": "React Native Firebase - Firebase ML brings the power of machine learning vision to your React Native application, supporting both Android & iOS.", "main": "lib/index.js", "types": "lib/index.d.ts", "scripts": { @@ -12,24 +12,18 @@ }, "repository": { "type": "git", - "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml-vision" + "url": "https://github.com/invertase/react-native-firebase/tree/master/packages/ml" }, "license": "Apache-2.0", "keywords": [ "react", "react-native", "firebase", - "mlkit", + "ml", "machine learning", "text recognition", "landmark recognition", - "image labeler", - "face detector", - "barcode", - "label", - "natural language", - "nlp", - "vision" + "image labeler" ], "peerDependencies": { "@react-native-firebase/app": "8.4.7" diff --git a/packages/ml/type-test.ts b/packages/ml/type-test.ts new file mode 100644 index 0000000000..42313332a8 --- /dev/null +++ b/packages/ml/type-test.ts @@ -0,0 +1,37 @@ +import firebase from '@react-native-firebase/app'; +import * as ml from '@react-native-firebase/ml'; + +console.log(ml.default().app); + +// checks module exists at root +console.log(firebase.ml().app.name); + +// checks module exists at app level +console.log(firebase.app().ml().app.name); + +// checks statics exist +console.log(firebase.ml.SDK_VERSION); + +// checks statics exist on defaultExport +console.log(firebase.SDK_VERSION); + +// checks root exists +console.log(firebase.SDK_VERSION); + +// checks firebase named export exists on module +console.log(ml.firebase.SDK_VERSION); + 
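Given the renames above (`Vision*` identifiers to `ML*`, `@react-native-firebase/ml-vision` to `@react-native-firebase/ml`), a hedged before/after sketch of what consuming code changes look like; the old identifiers are taken from the pre-rename package shown earlier in this diff:

```js
// Before, with @react-native-firebase/ml-vision:
// import vision, { VisionCloudTextRecognizerModelType } from '@react-native-firebase/ml-vision';
// const result = await vision().cloudTextRecognizerProcessImage(filePath);

// After, with the unified @react-native-firebase/ml package:
import ml, { MLCloudTextRecognizerModelType } from '@react-native-firebase/ml';

async function recognize(filePath) {
  return ml().cloudTextRecognizerProcessImage(filePath, {
    modelType: MLCloudTextRecognizerModelType.SPARSE_MODEL, // the default
  });
}
```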
+// checks multi-app support exists +console.log(firebase.ml(firebase.app()).app.name); + +// checks default export supports app arg +console.log(firebase.ml(firebase.app('foo')).app.name); + +console.log(firebase.ml.MLCloudTextRecognizerModelType.DENSE_MODEL); +console.log(ml.MLCloudTextRecognizerModelType.SPARSE_MODEL); + +console.log(firebase.ml.MLDocumentTextRecognizedBreakType.EOL_SURE_SPACE); +console.log(ml.MLDocumentTextRecognizedBreakType.HYPHEN); + +console.log(firebase.ml.MLCloudLandmarkRecognizerModelType.LATEST_MODEL); +console.log(ml.MLCloudLandmarkRecognizerModelType.STABLE_MODEL); diff --git a/tests/app.js b/tests/app.js index e1c01c1487..0ddfed57b9 100644 --- a/tests/app.js +++ b/tests/app.js @@ -29,8 +29,7 @@ import '@react-native-firebase/functions'; import '@react-native-firebase/iid'; import '@react-native-firebase/in-app-messaging'; import '@react-native-firebase/messaging'; -import '@react-native-firebase/ml-natural-language'; -import '@react-native-firebase/ml-vision'; +import '@react-native-firebase/ml'; import '@react-native-firebase/perf'; import '@react-native-firebase/remote-config'; import '@react-native-firebase/storage'; diff --git a/tests/e2e/mocha.opts b/tests/e2e/mocha.opts index f2b52f0b57..277057ef9f 100644 --- a/tests/e2e/mocha.opts +++ b/tests/e2e/mocha.opts @@ -12,7 +12,8 @@ ../packages/analytics/e2e/*.e2e.js -../packages/auth/e2e/*.e2e.js +# FIXME temporary, API limits make these failure-prone, toggled off during development +#../packages/auth/e2e/*.e2e.js # TODO a lot of these failing on CI - might be an API rate limit change # ../packages/admob/e2e/*.e2e.js @@ -29,10 +30,8 @@ ../packages/remote-config/e2e/*.e2e.js -../packages/ml-natural-language/e2e/*.e2e.js - # TODO - ci crashing Android -# ../packages/ml-vision/e2e/*.e2e.js +../packages/ml/e2e/*.e2e.js ../packages/in-app-messaging/e2e/*.e2e.js diff --git a/tests/firebase.json b/tests/firebase.json index f1df7b7dff..e5d42e83e1 100644 --- a/tests/firebase.json +++ b/tests/firebase.json @@ -9,23 +9,12 @@ "crashlytics_disable_auto_disabler": false, "crashlytics_auto_collection_enabled": true, - "ml_natural_language_language_id_model" : true, - "ml_natural_language_smart_reply_model" : true, - - "ml_vision_face_model" : true, - "ml_vision_ocr_model" : true, - "ml_vision_barcode_model" : true, - "messaging_auto_init_enabled": true, "messaging_android_headless_task_timeout": 30000, "messaging_android_notification_channel_id": "", "messaging_android_notification_color": "@color/hotpink", "messaging_ios_auto_register_for_remote_messages": true, - "ml_vision_label_model": true, - "ml_vision_image_label_model": true, - - "TODO_ml_natural_language_translate_model" : true, "TODO_analytics_auto_collection_enabled": true, "TODO_perf_auto_collection_enabled": true, "TODO_in_app_messaging_auto_collection_enabled": true, diff --git a/tests/ios/Podfile b/tests/ios/Podfile index 79efea04e6..670f206d7f 100644 --- a/tests/ios/Podfile +++ b/tests/ios/Podfile @@ -1,12 +1,8 @@ -platform :ios, '9.0' - -# Allow using RNFirebase as static frameworks +platform :ios, '10.0' $RNFirebaseAsStaticFramework = false # Version override testing -$FirebaseSDKVersion = '6.34.0' -# $FabricSDKVersion = '1.6.0' -# $CrashlyticsSDKVersion = '3.1.0' +$FirebaseSDKVersion = '7.0.0' require_relative '../node_modules/@react-native-community/cli-platform-ios/native_modules' @@ -45,7 +41,7 @@ target 'testing' do pod 'Folly', :podspec => '../node_modules/react-native/third-party-podspecs/Folly.podspec' use_native_modules! 
- #pod 'FirebaseFirestore', :git => 'https://github.com/invertase/firestore-ios-sdk-frameworks.git', :branch => 'master' + pod 'FirebaseFirestore', :git => 'https://github.com/invertase/firestore-ios-sdk-frameworks.git', :branch => 'master' end post_install do |installer| diff --git a/tests/ios/Podfile.lock b/tests/ios/Podfile.lock index 6ed96bba33..722a2aa11b 100644 --- a/tests/ios/Podfile.lock +++ b/tests/ios/Podfile.lock @@ -1,224 +1,5 @@ PODS: - - abseil/algorithm (0.20200225.0): - - abseil/algorithm/algorithm (= 0.20200225.0) - - abseil/algorithm/container (= 0.20200225.0) - - abseil/algorithm/algorithm (0.20200225.0): - - abseil/base/config - - abseil/algorithm/container (0.20200225.0): - - abseil/algorithm/algorithm - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/base (0.20200225.0): - - abseil/base/atomic_hook (= 0.20200225.0) - - abseil/base/base (= 0.20200225.0) - - abseil/base/base_internal (= 0.20200225.0) - - abseil/base/bits (= 0.20200225.0) - - abseil/base/config (= 0.20200225.0) - - abseil/base/core_headers (= 0.20200225.0) - - abseil/base/dynamic_annotations (= 0.20200225.0) - - abseil/base/endian (= 0.20200225.0) - - abseil/base/errno_saver (= 0.20200225.0) - - abseil/base/exponential_biased (= 0.20200225.0) - - abseil/base/log_severity (= 0.20200225.0) - - abseil/base/malloc_internal (= 0.20200225.0) - - abseil/base/periodic_sampler (= 0.20200225.0) - - abseil/base/pretty_function (= 0.20200225.0) - - abseil/base/raw_logging_internal (= 0.20200225.0) - - abseil/base/spinlock_wait (= 0.20200225.0) - - abseil/base/throw_delegate (= 0.20200225.0) - - abseil/base/atomic_hook (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/base (0.20200225.0): - - abseil/base/atomic_hook - - abseil/base/base_internal - - abseil/base/config - - abseil/base/core_headers - - abseil/base/dynamic_annotations - - abseil/base/log_severity - - abseil/base/raw_logging_internal - - abseil/base/spinlock_wait - - abseil/meta/type_traits - - abseil/base/base_internal (0.20200225.0): - - abseil/base/config - - abseil/meta/type_traits - - abseil/base/bits (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/config (0.20200225.0) - - abseil/base/core_headers (0.20200225.0): - - abseil/base/config - - abseil/base/dynamic_annotations (0.20200225.0) - - abseil/base/endian (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/errno_saver (0.20200225.0): - - abseil/base/config - - abseil/base/exponential_biased (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/log_severity (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/malloc_internal (0.20200225.0): - - abseil/base/base - - abseil/base/base_internal - - abseil/base/config - - abseil/base/core_headers - - abseil/base/dynamic_annotations - - abseil/base/raw_logging_internal - - abseil/base/periodic_sampler (0.20200225.0): - - abseil/base/core_headers - - abseil/base/exponential_biased - - abseil/base/pretty_function (0.20200225.0) - - abseil/base/raw_logging_internal (0.20200225.0): - - abseil/base/atomic_hook - - abseil/base/config - - abseil/base/core_headers - - abseil/base/log_severity - - abseil/base/spinlock_wait (0.20200225.0): - - abseil/base/base_internal - - abseil/base/core_headers - - abseil/base/errno_saver - - abseil/base/throw_delegate (0.20200225.0): - - abseil/base/config - - abseil/base/raw_logging_internal - - abseil/container/compressed_tuple (0.20200225.0): - 
- abseil/utility/utility - - abseil/container/inlined_vector (0.20200225.0): - - abseil/algorithm/algorithm - - abseil/base/core_headers - - abseil/base/throw_delegate - - abseil/container/inlined_vector_internal - - abseil/memory/memory - - abseil/container/inlined_vector_internal (0.20200225.0): - - abseil/base/core_headers - - abseil/container/compressed_tuple - - abseil/memory/memory - - abseil/meta/type_traits - - abseil/types/span - - abseil/memory (0.20200225.0): - - abseil/memory/memory (= 0.20200225.0) - - abseil/memory/memory (0.20200225.0): - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/meta (0.20200225.0): - - abseil/meta/type_traits (= 0.20200225.0) - - abseil/meta/type_traits (0.20200225.0): - - abseil/base/config - - abseil/numeric/int128 (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/strings/internal (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/base/endian - - abseil/base/raw_logging_internal - - abseil/meta/type_traits - - abseil/strings/str_format (0.20200225.0): - - abseil/strings/str_format_internal - - abseil/strings/str_format_internal (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/numeric/int128 - - abseil/strings/strings - - abseil/types/span - - abseil/strings/strings (0.20200225.0): - - abseil/base/base - - abseil/base/bits - - abseil/base/config - - abseil/base/core_headers - - abseil/base/endian - - abseil/base/raw_logging_internal - - abseil/base/throw_delegate - - abseil/memory/memory - - abseil/meta/type_traits - - abseil/numeric/int128 - - abseil/strings/internal - - abseil/time (0.20200225.0): - - abseil/time/internal (= 0.20200225.0) - - abseil/time/time (= 0.20200225.0) - - abseil/time/internal (0.20200225.0): - - abseil/time/internal/cctz (= 0.20200225.0) - - abseil/time/internal/cctz (0.20200225.0): - - abseil/time/internal/cctz/civil_time (= 0.20200225.0) - - abseil/time/internal/cctz/time_zone (= 0.20200225.0) - - abseil/time/internal/cctz/civil_time (0.20200225.0): - - abseil/base/config - - abseil/time/internal/cctz/time_zone (0.20200225.0): - - abseil/base/config - - abseil/time/internal/cctz/civil_time - - abseil/time/time (0.20200225.0): - - abseil/base/base - - abseil/base/core_headers - - abseil/base/raw_logging_internal - - abseil/numeric/int128 - - abseil/strings/strings - - abseil/time/internal/cctz/civil_time - - abseil/time/internal/cctz/time_zone - - abseil/types (0.20200225.0): - - abseil/types/any (= 0.20200225.0) - - abseil/types/bad_any_cast (= 0.20200225.0) - - abseil/types/bad_any_cast_impl (= 0.20200225.0) - - abseil/types/bad_optional_access (= 0.20200225.0) - - abseil/types/bad_variant_access (= 0.20200225.0) - - abseil/types/compare (= 0.20200225.0) - - abseil/types/optional (= 0.20200225.0) - - abseil/types/span (= 0.20200225.0) - - abseil/types/variant (= 0.20200225.0) - - abseil/types/any (0.20200225.0): - - abseil/base/config - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/types/bad_any_cast - - abseil/utility/utility - - abseil/types/bad_any_cast (0.20200225.0): - - abseil/base/config - - abseil/types/bad_any_cast_impl - - abseil/types/bad_any_cast_impl (0.20200225.0): - - abseil/base/config - - abseil/base/raw_logging_internal - - abseil/types/bad_optional_access (0.20200225.0): - - abseil/base/config - - abseil/base/raw_logging_internal - - abseil/types/bad_variant_access (0.20200225.0): - - abseil/base/config - - abseil/base/raw_logging_internal - - 
abseil/types/compare (0.20200225.0): - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/types/optional (0.20200225.0): - - abseil/base/base_internal - - abseil/base/config - - abseil/base/core_headers - - abseil/memory/memory - - abseil/meta/type_traits - - abseil/types/bad_optional_access - - abseil/utility/utility - - abseil/types/span (0.20200225.0): - - abseil/algorithm/algorithm - - abseil/base/core_headers - - abseil/base/throw_delegate - - abseil/meta/type_traits - - abseil/types/variant (0.20200225.0): - - abseil/base/base_internal - - abseil/base/config - - abseil/base/core_headers - - abseil/meta/type_traits - - abseil/types/bad_variant_access - - abseil/utility/utility - - abseil/utility/utility (0.20200225.0): - - abseil/base/base_internal - - abseil/base/config - - abseil/meta/type_traits - boost-for-react-native (1.63.0) - - BoringSSL-GRPC (0.0.7): - - BoringSSL-GRPC/Implementation (= 0.0.7) - - BoringSSL-GRPC/Interface (= 0.0.7) - - BoringSSL-GRPC/Implementation (0.0.7): - - BoringSSL-GRPC/Interface (= 0.0.7) - - BoringSSL-GRPC/Interface (0.0.7) - DoubleConversion (1.1.6) - FBLazyVector (0.62.2) - FBReactNativeSpec (0.62.2): @@ -228,214 +9,154 @@ PODS: - React-Core (= 0.62.2) - React-jsi (= 0.62.2) - ReactCommon/turbomodule/core (= 0.62.2) - - Firebase/AdMob (6.34.0): + - Firebase/AdMob (7.0.0): - Firebase/CoreOnly - - Google-Mobile-Ads-SDK (~> 7.63) - - Firebase/Analytics (6.34.0): + - Google-Mobile-Ads-SDK (~> 7.66) + - Firebase/Analytics (7.0.0): - Firebase/Core - - Firebase/Auth (6.34.0): + - Firebase/Auth (7.0.0): - Firebase/CoreOnly - - FirebaseAuth (~> 6.9.2) - - Firebase/Core (6.34.0): + - FirebaseAuth (~> 7.0.0) + - Firebase/Core (7.0.0): - Firebase/CoreOnly - - FirebaseAnalytics (= 6.9.0) - - Firebase/CoreOnly (6.34.0): - - FirebaseCore (= 6.10.4) - - Firebase/Crashlytics (6.34.0): + - FirebaseAnalytics (= 7.0.0) + - Firebase/CoreOnly (7.0.0): + - FirebaseCore (= 7.0.0) + - Firebase/Crashlytics (7.0.0): - Firebase/CoreOnly - - FirebaseCrashlytics (~> 4.6.2) - - Firebase/Database (6.34.0): + - FirebaseCrashlytics (~> 7.0.0) + - Firebase/Database (7.0.0): - Firebase/CoreOnly - - FirebaseDatabase (~> 6.6.0) - - Firebase/DynamicLinks (6.34.0): + - FirebaseDatabase (~> 7.0.0) + - Firebase/DynamicLinks (7.0.0): - Firebase/CoreOnly - - FirebaseDynamicLinks (~> 4.3.1) - - Firebase/Firestore (6.34.0): + - FirebaseDynamicLinks (~> 7.0.0) + - Firebase/Firestore (7.0.0): - Firebase/CoreOnly - - FirebaseFirestore (~> 1.19.0) - - Firebase/Functions (6.34.0): + - FirebaseFirestore (~> 7.0.0) + - Firebase/Functions (7.0.0): - Firebase/CoreOnly - - FirebaseFunctions (~> 2.9.0) - - Firebase/InAppMessaging (6.34.0): + - FirebaseFunctions (~> 7.0.0) + - Firebase/InAppMessaging (7.0.0): - Firebase/CoreOnly - - FirebaseInAppMessaging (~> 0.24.0) - - Firebase/Messaging (6.34.0): + - FirebaseInAppMessaging (~> 7.0.0-beta) + - Firebase/Messaging (7.0.0): - Firebase/CoreOnly - - FirebaseMessaging (~> 4.7.1) - - Firebase/MLCommon (6.34.0): + - FirebaseMessaging (~> 7.0.0) + - Firebase/MLVision (7.0.0): - Firebase/CoreOnly - - FirebaseMLCommon (~> 0.21.0) - - Firebase/MLNaturalLanguage (6.34.0): + - FirebaseMLVision (~> 7.0.0-beta) + - Firebase/Performance (7.0.0): - Firebase/CoreOnly - - FirebaseMLNaturalLanguage (~> 0.18.0) - - Firebase/MLNLLanguageID (6.34.0): + - FirebasePerformance (~> 7.0.0) + - Firebase/RemoteConfig (7.0.0): - Firebase/CoreOnly - - FirebaseMLNLLanguageID (~> 0.18.0) - - Firebase/MLNLSmartReply (6.34.0): + - FirebaseRemoteConfig (~> 7.0.0) + - 
Firebase/Storage (7.0.0): - Firebase/CoreOnly - - FirebaseMLNLSmartReply (~> 0.18.0) - - Firebase/MLVision (6.34.0): - - Firebase/CoreOnly - - FirebaseMLVision (~> 0.21.0) - - Firebase/MLVisionBarcodeModel (6.34.0): - - Firebase/CoreOnly - - FirebaseMLVisionBarcodeModel (~> 0.21.0) - - Firebase/MLVisionFaceModel (6.34.0): - - Firebase/CoreOnly - - FirebaseMLVisionFaceModel (~> 0.21.0) - - Firebase/MLVisionLabelModel (6.34.0): - - Firebase/CoreOnly - - FirebaseMLVisionLabelModel (~> 0.21.0) - - Firebase/MLVisionTextModel (6.34.0): - - Firebase/CoreOnly - - FirebaseMLVisionTextModel (~> 0.21.0) - - Firebase/Performance (6.34.0): - - Firebase/CoreOnly - - FirebasePerformance (~> 3.3.1) - - Firebase/RemoteConfig (6.34.0): - - Firebase/CoreOnly - - FirebaseRemoteConfig (~> 4.9.1) - - Firebase/Storage (6.34.0): - - Firebase/CoreOnly - - FirebaseStorage (~> 3.9.1) - - FirebaseABTesting (4.2.0): - - FirebaseCore (~> 6.10) - - FirebaseAnalytics (6.9.0): - - FirebaseCore (~> 6.10) - - FirebaseInstallations (~> 1.7) - - GoogleAppMeasurement (= 6.9.0) - - GoogleUtilities/AppDelegateSwizzler (~> 6.7) - - GoogleUtilities/MethodSwizzler (~> 6.7) - - GoogleUtilities/Network (~> 6.7) - - "GoogleUtilities/NSData+zlib (~> 6.7)" - - nanopb (~> 1.30906.0) - - FirebaseAuth (6.9.2): - - FirebaseCore (~> 6.10) - - GoogleUtilities/AppDelegateSwizzler (~> 6.7) - - GoogleUtilities/Environment (~> 6.7) - - GTMSessionFetcher/Core (~> 1.1) - - FirebaseCore (6.10.4): - - FirebaseCoreDiagnostics (~> 1.6) - - GoogleUtilities/Environment (~> 6.7) - - GoogleUtilities/Logger (~> 6.7) - - FirebaseCoreDiagnostics (1.7.0): - - GoogleDataTransport (~> 7.4) - - GoogleUtilities/Environment (~> 6.7) - - GoogleUtilities/Logger (~> 6.7) - - nanopb (~> 1.30906.0) - - FirebaseCrashlytics (4.6.2): - - FirebaseCore (~> 6.10) - - FirebaseInstallations (~> 1.6) - - GoogleDataTransport (~> 7.2) - - nanopb (~> 1.30906.0) + - FirebaseStorage (~> 7.0.0) + - FirebaseABTesting (7.1.0): + - FirebaseCore (~> 7.0) + - FirebaseAnalytics (7.0.0): + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - GoogleAppMeasurement (= 7.0.0) + - GoogleUtilities/AppDelegateSwizzler (~> 7.0) + - GoogleUtilities/MethodSwizzler (~> 7.0) + - GoogleUtilities/Network (~> 7.0) + - "GoogleUtilities/NSData+zlib (~> 7.0)" + - nanopb (~> 2.30906.0) + - FirebaseAuth (7.0.0): + - FirebaseCore (~> 7.0) + - GoogleUtilities/AppDelegateSwizzler (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - GTMSessionFetcher/Core (~> 1.4) + - FirebaseCore (7.0.0): + - FirebaseCoreDiagnostics (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/Logger (~> 7.0) + - FirebaseCoreDiagnostics (7.1.0): + - GoogleDataTransport (~> 8.0) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/Logger (~> 7.0) + - nanopb (~> 2.30906.0) + - FirebaseCrashlytics (7.0.0): + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - GoogleDataTransport (~> 8.0) + - nanopb (~> 2.30906.0) - PromisesObjC (~> 1.2) - - FirebaseDatabase (6.6.0): - - FirebaseCore (~> 6.10) - - leveldb-library (~> 1.22) - - FirebaseDynamicLinks (4.3.1): - - FirebaseCore (~> 6.10) - - FirebaseFirestore (1.19.0): - - abseil/algorithm (= 0.20200225.0) - - abseil/base (= 0.20200225.0) - - abseil/memory (= 0.20200225.0) - - abseil/meta (= 0.20200225.0) - - abseil/strings/strings (= 0.20200225.0) - - abseil/time (= 0.20200225.0) - - abseil/types (= 0.20200225.0) - - FirebaseCore (~> 6.10) - - "gRPC-C++ (~> 1.28.0)" + - FirebaseDatabase (7.0.0): + - FirebaseCore (~> 7.0) - leveldb-library (~> 1.22) - 
- nanopb (~> 1.30906.0) - - FirebaseFunctions (2.9.0): - - FirebaseCore (~> 6.10) - - GTMSessionFetcher/Core (~> 1.1) - - FirebaseInAppMessaging (0.24.0): - - FirebaseABTesting (~> 4.2) - - FirebaseCore (~> 6.10) - - FirebaseInstallations (~> 1.6) - - GoogleUtilities/Environment (~> 6.7) - - nanopb (~> 1.30906.0) - - FirebaseInstallations (1.7.0): - - FirebaseCore (~> 6.10) - - GoogleUtilities/Environment (~> 6.7) - - GoogleUtilities/UserDefaults (~> 6.7) + - FirebaseDynamicLinks (7.0.0): + - FirebaseCore (~> 7.0) + - FirebaseFirestore (7.0.0) + - FirebaseFunctions (7.0.0): + - FirebaseCore (~> 7.0) + - GTMSessionFetcher/Core (~> 1.4) + - FirebaseInAppMessaging (7.0.0-beta): + - FirebaseABTesting (~> 7.0) + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - nanopb (~> 2.30906.0) + - FirebaseInstallations (7.1.0): + - FirebaseCore (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/UserDefaults (~> 7.0) - PromisesObjC (~> 1.2) - - FirebaseInstanceID (4.8.0): - - FirebaseCore (~> 6.10) - - FirebaseInstallations (~> 1.6) - - GoogleUtilities/Environment (~> 6.7) - - GoogleUtilities/UserDefaults (~> 6.7) - - FirebaseMessaging (4.7.1): - - FirebaseCore (~> 6.10) - - FirebaseInstanceID (~> 4.7) - - GoogleUtilities/AppDelegateSwizzler (~> 6.7) - - GoogleUtilities/Environment (~> 6.7) - - GoogleUtilities/Reachability (~> 6.7) - - GoogleUtilities/UserDefaults (~> 6.7) - - Protobuf (>= 3.9.2, ~> 3.9) - - FirebaseMLCommon (0.21.0): - - FirebaseCore (~> 6.9) - - FirebaseInstallations (~> 1.5) - - GoogleToolboxForMac/Logger (~> 2.1) - - "GoogleToolboxForMac/NSData+zlib (~> 2.1)" - - "GoogleToolboxForMac/NSDictionary+URLArguments (~> 2.1)" - - GoogleUtilities/UserDefaults (~> 6.0) - - GTMSessionFetcher/Core (~> 1.1) - - Protobuf (~> 3.12) - - FirebaseMLNaturalLanguage (0.18.0): - - FirebaseCore (~> 6.9) - - FirebaseMLCommon (~> 0.21) + - FirebaseInstanceID (7.1.0): + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/UserDefaults (~> 7.0) + - FirebaseMessaging (7.0.0): + - FirebaseCore (~> 7.0) + - FirebaseInstanceID (~> 7.0) + - GoogleUtilities/AppDelegateSwizzler (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/Reachability (~> 7.0) + - GoogleUtilities/UserDefaults (~> 7.0) + - FirebaseMLCommon (7.1.0-beta): + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) - GoogleToolboxForMac/Logger (~> 2.1) - "GoogleToolboxForMac/NSData+zlib (~> 2.1)" - "GoogleToolboxForMac/NSDictionary+URLArguments (~> 2.1)" + - GoogleUtilities/UserDefaults (~> 7.0) - GTMSessionFetcher/Core (~> 1.1) - Protobuf (~> 3.12) - - FirebaseMLNLLanguageID (0.18.0): - - FirebaseCore (~> 6.9) - - FirebaseMLNaturalLanguage (~> 0.18) - - FirebaseMLNLSmartReply (0.18.0): - - FirebaseCore (~> 6.9) - - FirebaseMLNaturalLanguage (~> 0.18) - - FirebaseMLNLLanguageID (~> 0.18) - - FirebaseRemoteConfig (~> 4.7) - - FirebaseMLVision (0.21.0): - - FirebaseCore (~> 6.9) - - FirebaseMLCommon (~> 0.21) + - FirebaseMLVision (7.0.0-beta): + - FirebaseCore (~> 7.0) + - FirebaseMLCommon (~> 7.0-beta) - GoogleAPIClientForREST/Core (~> 1.3) - GoogleAPIClientForREST/Vision (~> 1.3) - GoogleToolboxForMac/Logger (~> 2.1) - "GoogleToolboxForMac/NSData+zlib (~> 2.1)" - GTMSessionFetcher/Core (~> 1.1) - Protobuf (~> 3.12) - - FirebaseMLVisionBarcodeModel (0.21.0): - - FirebaseMLVision (~> 0.21) - - FirebaseMLVisionFaceModel (0.21.0): - - FirebaseMLVision (~> 0.21) - - FirebaseMLVisionLabelModel 
(0.21.0): - - FirebaseMLVision (~> 0.21) - - FirebaseMLVisionTextModel (0.21.0): - - FirebaseMLVision (~> 0.21) - - FirebasePerformance (3.3.1): - - FirebaseCore (~> 6.9) - - FirebaseInstallations (~> 1.5) - - FirebaseRemoteConfig (~> 4.7) - - GoogleDataTransport (~> 7.0) + - FirebasePerformance (7.0.1): + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - FirebaseRemoteConfig (~> 7.0) + - GoogleDataTransport (~> 8.0) - GoogleToolboxForMac/Logger (~> 2.1) - "GoogleToolboxForMac/NSData+zlib (~> 2.1)" - - GoogleUtilities/Environment (~> 6.2) - - GoogleUtilities/ISASwizzler (~> 6.2) - - GoogleUtilities/MethodSwizzler (~> 6.2) + - GoogleUtilities/Environment (~> 7.0) + - GoogleUtilities/ISASwizzler (~> 7.0) + - GoogleUtilities/MethodSwizzler (~> 7.0) - GTMSessionFetcher/Core (~> 1.1) - Protobuf (~> 3.12) - - FirebaseRemoteConfig (4.9.1): - - FirebaseABTesting (~> 4.2) - - FirebaseCore (~> 6.10) - - FirebaseInstallations (~> 1.6) - - GoogleUtilities/Environment (~> 6.7) - - "GoogleUtilities/NSData+zlib (~> 6.7)" - - FirebaseStorage (3.9.1): - - FirebaseCore (~> 6.10) - - GTMSessionFetcher/Core (~> 1.1) + - FirebaseRemoteConfig (7.0.0): + - FirebaseABTesting (~> 7.0) + - FirebaseCore (~> 7.0) + - FirebaseInstallations (~> 7.0) + - GoogleUtilities/Environment (~> 7.0) + - "GoogleUtilities/NSData+zlib (~> 7.0)" + - FirebaseStorage (7.0.0): + - FirebaseCore (~> 7.0) + - GTMSessionFetcher/Core (~> 1.4) - Folly (2018.10.22.00): - boost-for-react-native - DoubleConversion @@ -446,92 +167,68 @@ PODS: - DoubleConversion - glog - glog (0.3.5) - - Google-Mobile-Ads-SDK (7.66.0): - - GoogleAppMeasurement (~> 6.0) + - Google-Mobile-Ads-SDK (7.68.0): + - GoogleAppMeasurement (~> 7.0) - GoogleUserMessagingPlatform (~> 1.1) - - GoogleAPIClientForREST/Core (1.4.3): + - GoogleAPIClientForREST/Core (1.5.1): - GTMSessionFetcher (>= 1.1.7) - - GoogleAPIClientForREST/Vision (1.4.3): + - GoogleAPIClientForREST/Vision (1.5.1): - GoogleAPIClientForREST/Core - GTMSessionFetcher (>= 1.1.7) - - GoogleAppMeasurement (6.9.0): - - GoogleUtilities/AppDelegateSwizzler (~> 6.7) - - GoogleUtilities/MethodSwizzler (~> 6.7) - - GoogleUtilities/Network (~> 6.7) - - "GoogleUtilities/NSData+zlib (~> 6.7)" - - nanopb (~> 1.30906.0) - - GoogleDataTransport (7.5.1): - - nanopb (~> 1.30906.0) - - GoogleToolboxForMac/DebugUtils (2.2.2): - - GoogleToolboxForMac/Defines (= 2.2.2) - - GoogleToolboxForMac/Defines (2.2.2) - - GoogleToolboxForMac/Logger (2.2.2): - - GoogleToolboxForMac/Defines (= 2.2.2) - - "GoogleToolboxForMac/NSData+zlib (2.2.2)": - - GoogleToolboxForMac/Defines (= 2.2.2) - - "GoogleToolboxForMac/NSDictionary+URLArguments (2.2.2)": - - GoogleToolboxForMac/DebugUtils (= 2.2.2) - - GoogleToolboxForMac/Defines (= 2.2.2) - - "GoogleToolboxForMac/NSString+URLArguments (= 2.2.2)" - - "GoogleToolboxForMac/NSString+URLArguments (2.2.2)" - - GoogleUserMessagingPlatform (1.2.0) - - GoogleUtilities/AppDelegateSwizzler (6.7.2): + - GoogleAppMeasurement (7.0.0): + - GoogleUtilities/AppDelegateSwizzler (~> 7.0) + - GoogleUtilities/MethodSwizzler (~> 7.0) + - GoogleUtilities/Network (~> 7.0) + - "GoogleUtilities/NSData+zlib (~> 7.0)" + - nanopb (~> 2.30906.0) + - GoogleDataTransport (8.0.1): + - nanopb (~> 2.30906.0) + - GoogleToolboxForMac/DebugUtils (2.3.0): + - GoogleToolboxForMac/Defines (= 2.3.0) + - GoogleToolboxForMac/Defines (2.3.0) + - GoogleToolboxForMac/Logger (2.3.0): + - GoogleToolboxForMac/Defines (= 2.3.0) + - "GoogleToolboxForMac/NSData+zlib (2.3.0)": + - GoogleToolboxForMac/Defines (= 2.3.0) + - 
"GoogleToolboxForMac/NSDictionary+URLArguments (2.3.0)": + - GoogleToolboxForMac/DebugUtils (= 2.3.0) + - GoogleToolboxForMac/Defines (= 2.3.0) + - "GoogleToolboxForMac/NSString+URLArguments (= 2.3.0)" + - "GoogleToolboxForMac/NSString+URLArguments (2.3.0)" + - GoogleUserMessagingPlatform (1.3.0) + - GoogleUtilities/AppDelegateSwizzler (7.1.0): - GoogleUtilities/Environment - GoogleUtilities/Logger - GoogleUtilities/Network - - GoogleUtilities/Environment (6.7.2): + - GoogleUtilities/Environment (7.1.0): - PromisesObjC (~> 1.2) - - GoogleUtilities/ISASwizzler (6.7.2) - - GoogleUtilities/Logger (6.7.2): + - GoogleUtilities/ISASwizzler (7.1.0) + - GoogleUtilities/Logger (7.1.0): - GoogleUtilities/Environment - - GoogleUtilities/MethodSwizzler (6.7.2): + - GoogleUtilities/MethodSwizzler (7.1.0): - GoogleUtilities/Logger - - GoogleUtilities/Network (6.7.2): + - GoogleUtilities/Network (7.1.0): - GoogleUtilities/Logger - "GoogleUtilities/NSData+zlib" - GoogleUtilities/Reachability - - "GoogleUtilities/NSData+zlib (6.7.2)" - - GoogleUtilities/Reachability (6.7.2): + - "GoogleUtilities/NSData+zlib (7.1.0)" + - GoogleUtilities/Reachability (7.1.0): - GoogleUtilities/Logger - - GoogleUtilities/UserDefaults (6.7.2): + - GoogleUtilities/UserDefaults (7.1.0): - GoogleUtilities/Logger - - "gRPC-C++ (1.28.2)": - - "gRPC-C++/Implementation (= 1.28.2)" - - "gRPC-C++/Interface (= 1.28.2)" - - "gRPC-C++/Implementation (1.28.2)": - - abseil/container/inlined_vector (= 0.20200225.0) - - abseil/memory/memory (= 0.20200225.0) - - abseil/strings/str_format (= 0.20200225.0) - - abseil/strings/strings (= 0.20200225.0) - - abseil/types/optional (= 0.20200225.0) - - "gRPC-C++/Interface (= 1.28.2)" - - gRPC-Core (= 1.28.2) - - "gRPC-C++/Interface (1.28.2)" - - gRPC-Core (1.28.2): - - gRPC-Core/Implementation (= 1.28.2) - - gRPC-Core/Interface (= 1.28.2) - - gRPC-Core/Implementation (1.28.2): - - abseil/container/inlined_vector (= 0.20200225.0) - - abseil/memory/memory (= 0.20200225.0) - - abseil/strings/str_format (= 0.20200225.0) - - abseil/strings/strings (= 0.20200225.0) - - abseil/types/optional (= 0.20200225.0) - - BoringSSL-GRPC (= 0.0.7) - - gRPC-Core/Interface (= 1.28.2) - - gRPC-Core/Interface (1.28.2) - - GTMSessionFetcher (1.4.0): - - GTMSessionFetcher/Full (= 1.4.0) - - GTMSessionFetcher/Core (1.4.0) - - GTMSessionFetcher/Full (1.4.0): - - GTMSessionFetcher/Core (= 1.4.0) + - GTMSessionFetcher (1.5.0): + - GTMSessionFetcher/Full (= 1.5.0) + - GTMSessionFetcher/Core (1.5.0) + - GTMSessionFetcher/Full (1.5.0): + - GTMSessionFetcher/Core (= 1.5.0) - Jet (0.6.6-0): - React - leveldb-library (1.22) - - nanopb (1.30906.0): - - nanopb/decode (= 1.30906.0) - - nanopb/encode (= 1.30906.0) - - nanopb/decode (1.30906.0) - - nanopb/encode (1.30906.0) + - nanopb (2.30906.0): + - nanopb/decode (= 2.30906.0) + - nanopb/encode (= 2.30906.0) + - nanopb/decode (2.30906.0) + - nanopb/encode (2.30906.0) - PersonalizedAdConsent (1.0.5) - PromisesObjC (1.2.11) - Protobuf (3.13.0) @@ -757,81 +454,70 @@ PODS: - React-cxxreact (= 0.62.2) - React-jsi (= 0.62.2) - ReactCommon/callinvoker (= 0.62.2) - - RNFBAdMob (7.6.9): - - Firebase/AdMob (= 6.34.0) + - RNFBAdMob (7.6.10): + - Firebase/AdMob (= 7.0.0) - PersonalizedAdConsent (~> 1.0.4) - React-Core - RNFBApp - - RNFBAnalytics (7.6.8): - - Firebase/Analytics (= 6.34.0) + - RNFBAnalytics (7.6.10): + - Firebase/Analytics (= 7.0.0) - React-Core - RNFBApp - - RNFBApp (8.4.6): - - Firebase/CoreOnly (= 6.34.0) + - RNFBApp (8.4.7): + - Firebase/CoreOnly (= 7.0.0) - React-Core - - RNFBAuth 
(9.3.1): - - Firebase/Auth (= 6.34.0) + - RNFBAuth (9.3.3): + - Firebase/Auth (= 7.0.0) - React-Core - RNFBApp - - RNFBCrashlytics (8.4.11): - - Firebase/Crashlytics (= 6.34.0) + - RNFBCrashlytics (8.5.0): + - Firebase/Crashlytics (= 7.0.0) - React-Core - RNFBApp - - RNFBDatabase (7.5.12): - - Firebase/Database (= 6.34.0) + - RNFBDatabase (7.5.13): + - Firebase/Database (= 7.0.0) - React-Core - RNFBApp - - RNFBDynamicLinks (7.5.10): - - Firebase/DynamicLinks (= 6.34.0) + - RNFBDynamicLinks (7.5.11): + - Firebase/DynamicLinks (= 7.0.0) - GoogleUtilities/AppDelegateSwizzler - React-Core - RNFBApp - - RNFBFirestore (7.8.7): - - Firebase/Firestore (= 6.34.0) + - RNFBFirestore (7.10.0): + - Firebase/Firestore (= 7.0.0) - React-Core - RNFBApp - - RNFBFunctions (7.4.9): - - Firebase/Functions (= 6.34.0) + - RNFBFunctions (7.4.10): + - Firebase/Functions (= 7.0.0) - React-Core - RNFBApp - - RNFBIid (7.4.9): - - Firebase/CoreOnly (= 6.34.0) + - RNFBIid (7.4.10): + - Firebase/CoreOnly (= 7.0.0) - FirebaseInstanceID - React-Core - RNFBApp - - RNFBInAppMessaging (7.5.7): - - Firebase/InAppMessaging (= 6.34.0) + - RNFBInAppMessaging (7.5.8): + - Firebase/InAppMessaging (= 7.0.0) - React-Core - RNFBApp - - RNFBMessaging (7.9.1): - - Firebase/Messaging (= 6.34.0) + - RNFBMessaging (7.9.2): + - Firebase/Messaging (= 7.0.0) - React-Core - RNFBApp - - RNFBMLNaturalLanguage (7.4.9): - - Firebase/MLCommon (= 6.34.0) - - Firebase/MLNaturalLanguage (= 6.34.0) - - Firebase/MLNLLanguageID (= 6.34.0) - - Firebase/MLNLSmartReply (= 6.34.0) + - RNFBML (7.4.13): + - Firebase/MLVision (= 7.0.0) - React-Core - RNFBApp - - RNFBMLVision (7.4.11): - - Firebase/MLVision (= 6.34.0) - - Firebase/MLVisionBarcodeModel (= 6.34.0) - - Firebase/MLVisionFaceModel (= 6.34.0) - - Firebase/MLVisionLabelModel (= 6.34.0) - - Firebase/MLVisionTextModel (= 6.34.0) + - RNFBPerf (7.4.10): + - Firebase/Performance (= 7.0.0) - React-Core - RNFBApp - - RNFBPerf (7.4.9): - - Firebase/Performance (= 6.34.0) + - RNFBRemoteConfig (9.0.12): + - Firebase/RemoteConfig (= 7.0.0) - React-Core - RNFBApp - - RNFBRemoteConfig (9.0.11): - - Firebase/RemoteConfig (= 6.34.0) - - React-Core - - RNFBApp - - RNFBStorage (7.4.10): - - Firebase/Storage (= 6.34.0) + - RNFBStorage (7.4.12): + - Firebase/Storage (= 7.0.0) - React-Core - RNFBApp - Yoga (1.14.0) @@ -840,6 +526,7 @@ DEPENDENCIES: - DoubleConversion (from `../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`) - FBLazyVector (from `../node_modules/react-native/Libraries/FBLazyVector`) - FBReactNativeSpec (from `../node_modules/react-native/Libraries/FBReactNativeSpec`) + - FirebaseFirestore (from `https://github.com/invertase/firestore-ios-sdk-frameworks.git`, branch `master`) - Folly (from `../node_modules/react-native/third-party-podspecs/Folly.podspec`) - glog (from `../node_modules/react-native/third-party-podspecs/glog.podspec`) - Jet (from `../node_modules/jet/ios`) @@ -877,8 +564,7 @@ DEPENDENCIES: - RNFBIid (from `../../packages/iid`) - RNFBInAppMessaging (from `../../packages/in-app-messaging`) - RNFBMessaging (from `../../packages/messaging`) - - RNFBMLNaturalLanguage (from `../../packages/ml-natural-language`) - - RNFBMLVision (from `../../packages/ml-vision`) + - RNFBML (from `../../packages/ml`) - RNFBPerf (from `../../packages/perf`) - RNFBRemoteConfig (from `../../packages/remote-config`) - RNFBStorage (from `../../packages/storage`) @@ -886,9 +572,7 @@ DEPENDENCIES: SPEC REPOS: trunk: - - abseil - boost-for-react-native - - BoringSSL-GRPC - Firebase - 
     - FirebaseABTesting
     - FirebaseAnalytics
@@ -898,21 +582,13 @@ SPEC REPOS:
     - FirebaseCrashlytics
     - FirebaseDatabase
     - FirebaseDynamicLinks
-    - FirebaseFirestore
     - FirebaseFunctions
     - FirebaseInAppMessaging
     - FirebaseInstallations
     - FirebaseInstanceID
     - FirebaseMessaging
     - FirebaseMLCommon
-    - FirebaseMLNaturalLanguage
-    - FirebaseMLNLLanguageID
-    - FirebaseMLNLSmartReply
     - FirebaseMLVision
-    - FirebaseMLVisionBarcodeModel
-    - FirebaseMLVisionFaceModel
-    - FirebaseMLVisionLabelModel
-    - FirebaseMLVisionTextModel
     - FirebasePerformance
     - FirebaseRemoteConfig
     - FirebaseStorage
@@ -923,8 +599,6 @@ SPEC REPOS:
     - GoogleToolboxForMac
     - GoogleUserMessagingPlatform
     - GoogleUtilities
-    - "gRPC-C++"
-    - gRPC-Core
     - GTMSessionFetcher
     - leveldb-library
     - nanopb
@@ -939,6 +613,9 @@ EXTERNAL SOURCES:
     :path: "../node_modules/react-native/Libraries/FBLazyVector"
   FBReactNativeSpec:
     :path: "../node_modules/react-native/Libraries/FBReactNativeSpec"
+  FirebaseFirestore:
+    :branch: master
+    :git: https://github.com/invertase/firestore-ios-sdk-frameworks.git
   Folly:
     :podspec: "../node_modules/react-native/third-party-podspecs/Folly.podspec"
   glog:
@@ -1007,10 +684,8 @@ EXTERNAL SOURCES:
     :path: "../../packages/in-app-messaging"
   RNFBMessaging:
     :path: "../../packages/messaging"
-  RNFBMLNaturalLanguage:
-    :path: "../../packages/ml-natural-language"
-  RNFBMLVision:
-    :path: "../../packages/ml-vision"
+  RNFBML:
+    :path: "../../packages/ml"
   RNFBPerf:
     :path: "../../packages/perf"
   RNFBRemoteConfig:
     :path: "../../packages/remote-config"
@@ -1020,55 +695,49 @@ EXTERNAL SOURCES:
   Yoga:
     :path: "../node_modules/react-native/ReactCommon/yoga"

+CHECKOUT OPTIONS:
+  FirebaseFirestore:
+    :commit: 3d712e901b44cb4ca509844356040a2cb61ddf5a
+    :git: https://github.com/invertase/firestore-ios-sdk-frameworks.git
+
 SPEC CHECKSUMS:
-  abseil: 6c8eb7892aefa08d929b39f9bb108e5367e3228f
   boost-for-react-native: 39c7adb57c4e60d6c5479dd8623128eb5b3f0f2c
-  BoringSSL-GRPC: 8edf627ee524575e2f8d19d56f068b448eea3879
   DoubleConversion: 5805e889d232975c086db112ece9ed034df7a0b2
   FBLazyVector: 4aab18c93cd9546e4bfed752b4084585eca8b245
   FBReactNativeSpec: 5465d51ccfeecb7faa12f9ae0024f2044ce4044e
-  Firebase: c23a36d9e4cdf7877dfcba8dd0c58add66358999
-  FirebaseABTesting: 8a9d8df3acc2b43f4a22014ddf9f601bca6af699
-  FirebaseAnalytics: 3bb096873ee0d7fa4b6c70f5e9166b6da413cc7f
-  FirebaseAuth: c92d49ada7948d1a23466e3db17bc4c2039dddc3
-  FirebaseCore: d3a978a3cfa3240bf7e4ba7d137fdf5b22b628ec
-  FirebaseCoreDiagnostics: 770ac5958e1372ce67959ae4b4f31d8e127c3ac1
-  FirebaseCrashlytics: 1a747c9cc084a24dc6d9511c991db1cd078154eb
-  FirebaseDatabase: 13a865a4b85897462b930eb683bda8f52583713f
-  FirebaseDynamicLinks: 6eac37d86910382eafb6315d952cc44c9e176094
-  FirebaseFirestore: 9b2f1b9b9a6f2f0b6fb7484b9e32ab7e39243554
-  FirebaseFunctions: 27518fdd14d8b3a849e2443f921cd1b471ab7acd
-  FirebaseInAppMessaging: 9da48721c6ad1b5bdc2b1108f2d3d561eb2245ca
-  FirebaseInstallations: 466c7b4d1f58fe16707693091da253726a731ed2
-  FirebaseInstanceID: bd3ffc24367f901a43c063b36c640b345a4a5dd1
-  FirebaseMessaging: 5eca4ef173de76253352511aafef774caa1cba2a
-  FirebaseMLCommon: d218d75dd1c6c4e447f731ac22da56b88cb79431
-  FirebaseMLNaturalLanguage: 32cccde63dfdf82341d570b3d4b24e746303d4cd
-  FirebaseMLNLLanguageID: 1adfdf439843d836e8d741c5124b97ebac645334
-  FirebaseMLNLSmartReply: 046bdc30bddbfbead3f5cbca97f28a26a316d346
-  FirebaseMLVision: 68dd092b4c52a7ac163ec0d4f541d5711fc9fec6
-  FirebaseMLVisionBarcodeModel: 394cd61c52dc03558088caf82b0dade8028f57d5
-  FirebaseMLVisionFaceModel: a67b2bf9b8407127a0bdb0ba98eb265637d1dc9d
-  FirebaseMLVisionLabelModel: c6922e607cf4549b14981c80bf0d69eb51a2b547
-  FirebaseMLVisionTextModel: e9f3cba0f31022ae9dd3d246aff9849075cacd98
-  FirebasePerformance: e325a8ee84a6a3d89c0be049390ed6c1775cce22
-  FirebaseRemoteConfig: 35a729305f254fb15a2e541d4b36f3a379da7fdc
-  FirebaseStorage: 15e0f15ef3c7fec3d1899d68623e47d4447066b4
+  Firebase: 50be68416f50eb4eb2ecb0e78acab9a051ef95df
+  FirebaseABTesting: aaea04ea67858a4a9dce0d22ef645477dff50146
+  FirebaseAnalytics: c1166b7990bae464c6436132510bb718c6680f80
+  FirebaseAuth: 228dd0faa5b5263eaa8c63518b16faef438289a3
+  FirebaseCore: cf3122185fce1cf71cedbbc498ea84d2b3e7cb69
+  FirebaseCoreDiagnostics: 872cdb9b749b23346dddd5c1014d1babd2257de3
+  FirebaseCrashlytics: bd430b7323e8b49492a93e563e81899d0615f917
+  FirebaseDatabase: 2481b48ebfd233ef591095d79d76720ea85cde74
+  FirebaseDynamicLinks: 71ed03780db3986e1bd386d6a1be44d09d4cd0ec
+  FirebaseFirestore: ce5009ceae3e07c96f9cc580d9b521b9ec0af857
+  FirebaseFunctions: 571aee227a021debe3e1092aa079f751623e233a
+  FirebaseInAppMessaging: b4c1ec3ea31d83f762d8087e78ce846159437f39
+  FirebaseInstallations: 3de38553e86171b5f81d83cdeef63473d37bfdb0
+  FirebaseInstanceID: 61e8d10a4192a582c6239378169d10e504ca8d91
+  FirebaseMessaging: ecf9e04716b7ff1f1d92debab4d6f0e6bdb490aa
+  FirebaseMLCommon: d10d915b2fd1b285f7b80694afa1a65d7fb90f5c
+  FirebaseMLVision: 39cfe86b963ce9db9216da6630529d3089254b9a
+  FirebasePerformance: feb172454ef6568c8246d5713b6e65fde9f2e384
+  FirebaseRemoteConfig: ff8d3542cbd919c9d3851fd544690b8848fc0402
+  FirebaseStorage: ea52bc7a1cb540406ed1e1acfc2bf3946621ed34
   Folly: 30e7936e1c45c08d884aa59369ed951a8e68cf51
   glog: 1f3da668190260b06b429bb211bfbee5cd790c28
-  Google-Mobile-Ads-SDK: 7d7074359c040f5add4e0963bf860e14690060d0
-  GoogleAPIClientForREST: e2d95a611ac06a90d143c93bfd8597719f8b0938
-  GoogleAppMeasurement: a6a3a066369828db64eda428cb2856dc1cdc7c4e
-  GoogleDataTransport: f56af7caa4ed338dc8e138a5d7c5973e66440833
-  GoogleToolboxForMac: 800648f8b3127618c1b59c7f97684427630c5ea3
-  GoogleUserMessagingPlatform: c85530d930ba509583aa5a6d50a10aca22cf8502
-  GoogleUtilities: 7f2f5a07f888cdb145101d6042bc4422f57e70b3
-  "gRPC-C++": 13d8ccef97d5c3c441b7e3c529ef28ebee86fad2
-  gRPC-Core: 4afa11bfbedf7cdecd04de535a9e046893404ed5
-  GTMSessionFetcher: 6f5c8abbab8a9bce4bb3f057e317728ec6182b10
+  Google-Mobile-Ads-SDK: 29bbdb182d69ff606cc0301da1590b40be8d2205
+  GoogleAPIClientForREST: 4bb409633efcc2e1b3f945afe7e35039b5a61db2
+  GoogleAppMeasurement: 7790ef975d1d463c8614cd949a847e612edf087a
+  GoogleDataTransport: e4085e6762f36a6141738f46b0153473ce57fb18
+  GoogleToolboxForMac: 1350d40e86a76f7863928d63bcb0b89c84c521c5
+  GoogleUserMessagingPlatform: 1d4b6946710d18cec34742054092e2c2bddae61f
+  GoogleUtilities: f734da554aade8cc7928a31c2f3311897933a1bd
+  GTMSessionFetcher: b3503b20a988c4e20cc189aa798fd18220133f52
   Jet: 84fd0e2e9d49457fc04bc79b5d8857737a01c507
   leveldb-library: 55d93ee664b4007aac644a782d11da33fba316f7
-  nanopb: 59317e09cf1f1a0af72f12af412d54edf52603fc
+  nanopb: 1bf24dd71191072e120b83dd02d08f3da0d65e53
   PersonalizedAdConsent: dbecabb3467df967c16d9cebc2ef4a8890e4bbd8
   PromisesObjC: 8c196f5a328c2cba3e74624585467a557dcb482f
   Protobuf: 3dac39b34a08151c6d949560efe3f86134a3f748
@@ -1091,25 +760,24 @@ SPEC CHECKSUMS:
   React-RCTText: fae545b10cfdb3d247c36c56f61a94cfd6dba41d
   React-RCTVibration: 4356114dbcba4ce66991096e51a66e61eda51256
   ReactCommon: ed4e11d27609d571e7eee8b65548efc191116eb3
-  RNFBAdMob: 809f648889201406d333bc28a84bbf3294491f00
-  RNFBAnalytics: 159651d6eae3c85db38ba5d694d8c6c46fd3883c
-  RNFBApp: e0fc0113eecc07f440f17639c9b7c59ea90bc583
-  RNFBAuth: 16207757fa69ad50ec8ca04964f59cd560979294
-  RNFBCrashlytics: c85d01c3fb3a4cc1e762facb9d4ad26b95f7f9dc
-  RNFBDatabase: 6c01157824702f4fc1cedf9b4b95e9f3154cfbf1
-  RNFBDynamicLinks: 067d7419d8daf58b61faa70834b051410d5f6d4b
-  RNFBFirestore: 64986e129f4980e73b6e510684286d986367bef6
-  RNFBFunctions: ae7a279090e902cdf4da7890d64f31a0e4e2a825
-  RNFBIid: f40ac75229f8bb86cc6dd0c71b450e7df693f8f3
-  RNFBInAppMessaging: dda5e571edd579042fa1d68a685012daa871a3f6
-  RNFBMessaging: 1d2a6a249cf6b93bed1721befc42650c45615112
-  RNFBMLNaturalLanguage: 3662457b2e95b857bb9289d250b0a10bc10aca84
-  RNFBMLVision: c2547c24b59298ebe4b90a2025600d60a6929930
-  RNFBPerf: 0c08e45726f7a19487b79cef3d12ee7e917c8b7a
-  RNFBRemoteConfig: 85df9235f46c20e293257b6e481412ac585e2966
-  RNFBStorage: 72404d4977261c0d7060e87c3d0cf7f7e060c6a3
+  RNFBAdMob: 9fde71cdebb34b76c3563058b262838f3865f037
+  RNFBAnalytics: 20e6ac03857cab32df4511c41dbcac003cb1068a
+  RNFBApp: 28ebef9ae3051c4715c1e2397a5e9614f2ca8ffb
+  RNFBAuth: ce4d1e530ad01f460fa0a33e2dc7f95b41d0fc85
+  RNFBCrashlytics: 9fb5e40ec1f07d04b4be21452d1caf5ed94ca3cc
+  RNFBDatabase: f0d3d475f1c13c4d166ec946db4e3416dcd40422
+  RNFBDynamicLinks: 139d1cdf94467cb032050e32b95e9a2839f96f47
+  RNFBFirestore: c011591967b4dc15dd881a51500f5547f98939e7
+  RNFBFunctions: 442d72e42892dc6f6aa440080e10c46fdc488354
+  RNFBIid: de74265f2b4becca364dadbdfae969db26ba0889
+  RNFBInAppMessaging: 45729ef53490ebf9f82aea6829212ebcd9b61888
+  RNFBMessaging: 15c6fc8a5d000fe6f6d171c8b7513acf88261f08
+  RNFBML: 40bf23115d6a7b2648ffde19788cca83691d51a4
+  RNFBPerf: c10267695cbc2012167b063a3c86a436d296b5b6
+  RNFBRemoteConfig: 6615b43b44dff4e5b70378ad2d28fb35387ecd48
+  RNFBStorage: 85c103535ef5aef331c7e99d8e9953189c8e3bba
   Yoga: 3ebccbdd559724312790e7742142d062476b698e

-PODFILE CHECKSUM: 2b670e97319c5057f900292e8500c4c38d83aa3c
+PODFILE CHECKSUM: 422b670abe2e89213edbefb2e24475c616f0b76e

 COCOAPODS: 1.10.0
diff --git a/tests/ios/testing.xcodeproj/project.pbxproj b/tests/ios/testing.xcodeproj/project.pbxproj
index 94031f7522..dedcd9a23d 100644
--- a/tests/ios/testing.xcodeproj/project.pbxproj
+++ b/tests/ios/testing.xcodeproj/project.pbxproj
@@ -324,6 +324,10 @@
 			buildActionMask = 2147483647;
 			files = (
 			);
+			inputPaths = (
+				"${DWARF_DSYM_FOLDER_PATH}/${DWARF_DSYM_FILE_NAME}/Contents/Resources/DWARF/${TARGET_NAME}",
+				"$(SRCROOT)/$(BUILT_PRODUCTS_DIR)/$(INFOPLIST_PATH)",
+			);
 			name = "[CP-User] [RNFB] Crashlytics Configuration";
 			runOnlyForDeploymentPostprocessing = 0;
 			shellPath = /bin/sh;
diff --git a/tests/package.json b/tests/package.json
index 1bd5b15ca3..657f218767 100644
--- a/tests/package.json
+++ b/tests/package.json
@@ -20,8 +20,7 @@
     "@react-native-firebase/iid": "7.4.10",
     "@react-native-firebase/in-app-messaging": "7.5.8",
     "@react-native-firebase/messaging": "7.9.2",
-    "@react-native-firebase/ml-natural-language": "7.4.11",
-    "@react-native-firebase/ml-vision": "7.4.13",
+    "@react-native-firebase/ml": "7.4.13",
     "@react-native-firebase/perf": "7.4.10",
     "@react-native-firebase/remote-config": "9.0.12",
    "@react-native-firebase/storage": "7.4.12",
diff --git a/website/scripts/source-reference.js b/website/scripts/source-reference.js
index 8a326aa7a1..41125b7625 100644
--- a/website/scripts/source-reference.js
+++ b/website/scripts/source-reference.js
@@ -163,18 +163,16 @@ function moduleNameToFullName(name) {
       return 'Instance ID';
     case 'in-app-messaging':
       return 'In-App Messaging';
-    case 'ml-natural-language':
-      return 'ML Kit Natural Language';
     case 'messaging':
       return 'Cloud Messaging';
-    case 'perf':
+    case 'ml':
+      return 'ML';
+    case 'perf':
       return 'Performance Monitoring';
     case 'remote-config':
       return 'Remote Config';
     case 'storage':
       return 'Storage';
-    case 'ml-vision':
-      return 'ML Kit Vision';
     case 'app':
       return 'Core/App';
     default:
diff --git a/website/src/templates/utils.ts b/website/src/templates/utils.ts
index 67d140d146..095770405a 100644
--- a/website/src/templates/utils.ts
+++ b/website/src/templates/utils.ts
@@ -45,11 +45,7 @@ function iconForModule(module: string): string {
       return '//static.invertase.io/assets/firebase/dynamic-links.svg';
     case 'in-app-messaging':
       return '//static.invertase.io/assets/firebase/in-app-messaging.svg';
-    case 'ml-natural-language':
-      return '//static.invertase.io/assets/firebase/ml-kit.svg';
-    case 'ml-language':
-      return '//static.invertase.io/assets/firebase/ml-kit.svg';
-    case 'ml-vision':
+    case 'ml':
       return '//static.invertase.io/assets/firebase/ml-kit.svg';
     case 'remote-config':
       return '//static.invertase.io/assets/firebase/remote-config.svg';