# Google Cloud Speech-to-Text streaming gRPC module
> By default, the module reads the Google Cloud API key from the google-services.json file, so on Android you may need to set up Google Play services, and make sure your API key is allowed to access the Cloud Speech-to-Text API.
> The API key can also be set from JS code with the `setApiKey()` method (see the snippet below the install commands).

## Installation

```sh
yarn add react-native-google-cloud-speech-to-text
# or
npm install react-native-google-cloud-speech-to-text
```
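If you would rather not ship the key in google-services.json, it can be supplied at runtime instead. A minimal sketch; `'YOUR_GOOGLE_CLOUD_API_KEY'` is a placeholder for your own key:

```ts
import GoogleCloudSpeechToText from 'react-native-google-cloud-speech-to-text';

// Call once before starting recognition; the key must be allowed to use
// the Cloud Speech-to-Text API.
GoogleCloudSpeechToText.setApiKey('YOUR_GOOGLE_CLOUD_API_KEY');
```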
## Usage

#### Android

- Don't forget to request the `RECORD_AUDIO` permission before starting recognition (see the sketch below).

#### iOS

- Coming soon!
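A minimal sketch of requesting that permission with React Native's `PermissionsAndroid` API; the helper name and dialog strings here are illustrative:

```ts
import { PermissionsAndroid } from 'react-native';

// Ask the user for microphone access before calling GoogleCloudSpeechToText.start().
async function requestMicrophonePermission(): Promise<boolean> {
  const granted = await PermissionsAndroid.request(
    PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
    {
      title: 'Microphone permission',
      message: 'Speech recognition needs access to your microphone.',
      buttonPositive: 'OK',
    }
  );
  return granted === PermissionsAndroid.RESULTS.GRANTED;
}
```

The full example below puts the permission request, the event listeners, and the start/stop calls together: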
```tsx
import * as React from 'react';
import {
  StyleSheet,
  View,
  Text,
  Button,
  SafeAreaView,
  PermissionsAndroid,
} from 'react-native';
import GoogleCloudSpeechToText, {
  SpeechRecognizeEvent,
  VoiceStartEvent,
  SpeechErrorEvent,
  VoiceEvent,
  SpeechStartEvent,
} from 'react-native-google-cloud-speech-to-text';
import { useEffect } from 'react';

const Separator = () => <View style={styles.separator} />;

export default function App() {
  const [transcript, setResult] = React.useState<string>('');

  // Request microphone access once on mount.
  useEffect(() => {
    PermissionsAndroid.request(PermissionsAndroid.PERMISSIONS.RECORD_AUDIO, {
      title: 'Microphone permission',
      message: 'This app needs access to your microphone to recognize speech.',
      buttonNeutral: 'Ask Me Later',
      buttonNegative: 'Cancel',
      buttonPositive: 'OK',
    });
  }, []);
  // Register listeners for the recognition events; clean them up on unmount.
  useEffect(() => {
    // GoogleCloudSpeechToText.setApiKey('key_____');
    GoogleCloudSpeechToText.onVoice(onVoice);
    GoogleCloudSpeechToText.onVoiceStart(onVoiceStart);
    GoogleCloudSpeechToText.onVoiceEnd(onVoiceEnd);
    GoogleCloudSpeechToText.onSpeechError(onSpeechError);
    GoogleCloudSpeechToText.onSpeechRecognized(onSpeechRecognized);
    GoogleCloudSpeechToText.onSpeechRecognizing(onSpeechRecognizing);
    return () => {
      GoogleCloudSpeechToText.removeListeners();
    };
  }, []);
  const onSpeechError = (_error: SpeechErrorEvent) => {
    console.log('onSpeechError: ', _error);
  };

  const onSpeechRecognized = (result: SpeechRecognizeEvent) => {
    console.log('onSpeechRecognized: ', result);
    setResult(result.transcript);
  };

  const onSpeechRecognizing = (result: SpeechRecognizeEvent) => {
    console.log('onSpeechRecognizing: ', result);
    setResult(result.transcript);
  };

  const onVoiceStart = (_event: VoiceStartEvent) => {
    console.log('onVoiceStart', _event);
  };

  const onVoice = (_event: VoiceEvent) => {
    console.log('onVoice', _event);
  };

  const onVoiceEnd = () => {
    console.log('onVoiceEnd: ');
  };

  const startRecognizing = async () => {
    const result: SpeechStartEvent = await GoogleCloudSpeechToText.start({
      speechToFile: true,
    });
    console.log('startRecognizing', result);
  };

  const stopRecognizing = async () => {
    await GoogleCloudSpeechToText.stop();
  };
  return (
    <SafeAreaView style={styles.container}>
      <Text style={styles.title}>
        Adjust the color in a way that looks standard on each platform. On
        iOS, the color prop controls the color of the text. On Android, the
        color adjusts the background color of the button.
      </Text>
      <View style={styles.fixToText}>
        <Button title="Start" onPress={startRecognizing} />
        <Button title="Stop" onPress={stopRecognizing} />
      </View>
      <Separator />
      <Text style={styles.title}>{transcript}</Text>
    </SafeAreaView>
  );
}
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    marginHorizontal: 16,
  },
  title: {
    textAlign: 'center',
    marginVertical: 8,
  },
  fixToText: {
    flexDirection: 'row',
    justifyContent: 'space-between',
  },
  separator: {
    marginVertical: 8,
    borderBottomColor: '#737373',
    borderBottomWidth: StyleSheet.hairlineWidth,
  },
});
```
See the contributing guide to learn how to contribute to the repository and the development workflow.
License: MIT