Cross-platform JavaScript/TypeScript
npm install @turbina/react-native
# or
yarn add @turbina/react-native

Install native dependencies:
cd ios && pod install && cd ..

No additional steps required. The SDK will auto-link.
import React, { useEffect } from 'react';
import { View, Text, StyleSheet } from 'react-native';
import Turbina from '@turbina/react-native';
/**
 * Root screen: initializes the Turbina SDK once on mount and renders a
 * ready message. Initialization failures are logged to the console only;
 * they are not surfaced in the UI.
 */
export default function App() {
  useEffect(() => {
    // Async init lives inside the effect so it is fire-and-forget;
    // errors are caught and logged rather than rejecting unhandled.
    const bootstrap = async () => {
      try {
        const result = await Turbina.initialize({
          licenseKey: 'trb_live_sk_your_license_key',
          appId: 'com.yourapp.id',
        });
        console.log('Turbina initialized:', result.tier);
        console.log('Features:', result.features);
      } catch (error) {
        console.error('Failed to initialize:', error);
      }
    };
    bootstrap();
  }, []);

  return (
    <View style={styles.container}>
      <Text style={styles.title}>Turbina SDK Ready!</Text>
    </View>
  );
}
// Layout for the App screen: content centered both ways, bold title.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
  },
  title: {
    fontSize: 24,
    fontWeight: 'bold',
  },
});

import React, { useState, useRef } from 'react';
import { View, TouchableOpacity, Text, StyleSheet } from 'react-native';
import { TurbinaCamera, VideoFilter, FaceEffect } from '@turbina/react-native';
/**
 * Full-screen front camera with selectable video filters and a
 * tap-to-toggle record button.
 *
 * Fixes over the naive version:
 * - `stopRecording` is reached through optional chaining, so it yields
 *   `undefined` when the ref is not yet attached; `video.path` is now
 *   guarded instead of throwing.
 * - start/stop failures are caught and logged rather than rejecting
 *   unhandled inside the press handler.
 */
export default function CameraScreen() {
  const cameraRef = useRef(null);
  const [isRecording, setIsRecording] = useState(false);
  const [currentFilter, setCurrentFilter] = useState(VideoFilter.NONE);

  // Toggles recording state. stopRecording resolves with the captured
  // video metadata (path) — presumably; verify against the SDK docs.
  const handleRecord = async () => {
    try {
      if (isRecording) {
        const video = await cameraRef.current?.stopRecording();
        if (video) {
          console.log('Video saved:', video.path);
        }
        setIsRecording(false);
      } else {
        await cameraRef.current?.startRecording();
        setIsRecording(true);
      }
    } catch (error) {
      console.error('Recording failed:', error);
    }
  };

  // Applies the filter to the live preview and remembers the selection.
  const applyFilter = (filter: VideoFilter) => {
    cameraRef.current?.applyFilter(filter);
    setCurrentFilter(filter);
  };

  return (
    <View style={styles.container}>
      <TurbinaCamera
        ref={cameraRef}
        style={styles.camera}
        cameraType="front"
        onInitialized={() => console.log('Camera ready')}
        onError={(error) => console.error('Camera error:', error)}
      />
      {/* Filter Buttons */}
      <View style={styles.filters}>
        <TouchableOpacity
          style={styles.filterButton}
          onPress={() => applyFilter(VideoFilter.VINTAGE)}
        >
          <Text style={styles.filterText}>Vintage</Text>
        </TouchableOpacity>
        <TouchableOpacity
          style={styles.filterButton}
          onPress={() => applyFilter(VideoFilter.CINEMATIC)}
        >
          <Text style={styles.filterText}>Cinematic</Text>
        </TouchableOpacity>
        <TouchableOpacity
          style={styles.filterButton}
          onPress={() => applyFilter(VideoFilter.NONE)}
        >
          <Text style={styles.filterText}>None</Text>
        </TouchableOpacity>
      </View>
      {/* Record Button */}
      <TouchableOpacity
        style={[styles.recordButton, isRecording && styles.recordingButton]}
        onPress={handleRecord}
      >
        <View style={[styles.recordInner, isRecording && styles.recordingInner]} />
      </TouchableOpacity>
    </View>
  );
}
// CameraScreen layout: camera fills the screen; filter row floats near
// the top; circular record button floats at the bottom. The "recording"
// variants tint the button red and shrink the inner dot to a stop square.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    backgroundColor: 'black',
  },
  camera: {
    flex: 1,
  },
  filters: {
    position: 'absolute',
    top: 60,
    left: 0,
    right: 0,
    flexDirection: 'row',
    justifyContent: 'center',
    gap: 10,
  },
  filterButton: {
    backgroundColor: 'rgba(255, 255, 255, 0.2)',
    paddingHorizontal: 16,
    paddingVertical: 8,
    borderRadius: 20,
  },
  filterText: {
    color: 'white',
    fontWeight: '600',
  },
  recordButton: {
    position: 'absolute',
    bottom: 40,
    alignSelf: 'center',
    width: 80,
    height: 80,
    borderRadius: 40,
    backgroundColor: 'rgba(255, 255, 255, 0.3)',
    justifyContent: 'center',
    alignItems: 'center',
  },
  recordingButton: {
    backgroundColor: 'rgba(255, 0, 0, 0.3)',
  },
  recordInner: {
    width: 60,
    height: 60,
    borderRadius: 30,
    backgroundColor: 'red',
  },
  recordingInner: {
    width: 30,
    height: 30,
    borderRadius: 4,
  },
});

import React, { useRef } from 'react';
import { View, TouchableOpacity, Text } from 'react-native';
import { TurbinaCamera, FaceEffect } from '@turbina/react-native';
/**
 * Demo screen for face effects: enable face tracking, then apply
 * individual effects with an adjustable intensity.
 */
export default function FaceEffectsScreen() {
  const cameraRef = useRef(null);

  // Face tracking must be enabled before effects take hold — presumably;
  // verify against the SDK docs.
  const enableFaceTracking = async () => {
    await cameraRef.current?.enableFaceTracking();
    console.log('Face tracking enabled');
  };

  // intensity defaults to 0.7; assumed range 0..1 — TODO confirm in SDK docs.
  const applyFaceEffect = async (effect: FaceEffect, intensity = 0.7) => {
    await cameraRef.current?.applyFaceEffect(effect, intensity);
  };

  return (
    <View style={{ flex: 1 }}>
      <TurbinaCamera
        ref={cameraRef}
        style={{ flex: 1 }}
        cameraType="front"
      />
      <View style={{ position: 'absolute', bottom: 20, flexDirection: 'row', gap: 10 }}>
        <TouchableOpacity onPress={enableFaceTracking}>
          <Text>Enable Face Tracking</Text>
        </TouchableOpacity>
        <TouchableOpacity onPress={() => applyFaceEffect(FaceEffect.SMOOTH_SKIN)}>
          <Text>Smooth Skin</Text>
        </TouchableOpacity>
        <TouchableOpacity onPress={() => applyFaceEffect(FaceEffect.BIG_EYES)}>
          <Text>Big Eyes</Text>
        </TouchableOpacity>
        <TouchableOpacity onPress={() => applyFaceEffect(FaceEffect.SLIM_FACE)}>
          <Text>Slim Face</Text>
        </TouchableOpacity>
      </View>
    </View>
  );
}

The SDK is fully typed. Here's an example:
import Turbina, {
TurbinaInitResult,
VideoFilter,
FaceEffect,
RecordingOptions,
TurbinaError
} from '@turbina/react-native';
// Initialize with type safety
const initResult: TurbinaInitResult = await Turbina.initialize({
  licenseKey: 'trb_live_sk_your_key',
  appId: 'com.yourapp.id',
});
console.log('Tier:', initResult.tier); // 'FREE' | 'STARTER' | 'PRO' | 'ENTERPRISE'
console.log('Features:', initResult.features); // Fully typed features object

// Recording with options
const recordingOptions: RecordingOptions = {
  quality: 'high',
  fps: 30,
  maxDuration: 60, // seconds
};
await camera.current?.startRecording(recordingOptions);

<key>NSCameraUsageDescription</key>
<string>We need camera access to record videos with effects</string>
<key>NSMicrophoneUsageDescription</key>
<string>We need microphone access to record audio</string>
<key>NSPhotoLibraryAddUsageDescription</key>
<string>We need permission to save videos to your library</string>

<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />

import { PermissionsAndroid, Platform } from 'react-native';
/**
 * Requests camera, microphone, and storage permissions at runtime on
 * Android. iOS prompts automatically when the camera/mic is first used,
 * driven by the Info.plist usage descriptions.
 *
 * @returns true when camera AND microphone were both granted. The
 *          storage result is intentionally not checked here — presumably
 *          recording works without it; confirm against the SDK docs.
 */
async function requestPermissions(): Promise<boolean> {
  if (Platform.OS === 'android') {
    const granted = await PermissionsAndroid.requestMultiple([
      PermissionsAndroid.PERMISSIONS.CAMERA,
      PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
      PermissionsAndroid.PERMISSIONS.WRITE_EXTERNAL_STORAGE,
    ]);
    // Index the result map with the same constants used in the request,
    // instead of duplicating the raw permission strings.
    return (
      granted[PermissionsAndroid.PERMISSIONS.CAMERA] === PermissionsAndroid.RESULTS.GRANTED &&
      granted[PermissionsAndroid.PERMISSIONS.RECORD_AUDIO] === PermissionsAndroid.RESULTS.GRANTED
    );
  }
  return true; // iOS handles permissions automatically
}

You're ready to build! Check out these resources: