Guest
(NOBRIDGE) ERROR TypeError: Cannot read property 'install' of null [Component Stack]
Post
by Guest » 05 Jan 2025, 13:27
I'm trying to build a React Native app with Expo and I'm stuck here: https://i.sstatic.net/2fOVDnlM.png
If you already understand the problem, you can skip the background below:
I'm developing a React Native app with Expo in which I want to integrate an ONNX model for face recognition. The app lets users pick an image, preprocess it, and run inference via ONNX Runtime for React Native. However, I'm running into several issues during development:
Here is the output I see in the application: https://i.sstatic.net/yvXJmE0w.png
Here is the code I wrote in index.jsx (I replaced index.tsx with a .jsx file):
Code: Select all
import React, { useState } from 'react';
import { View, Button, Image, Text, StyleSheet, Platform } from 'react-native';
import * as ImagePicker from 'expo-image-picker';
import { InferenceSession, Tensor } from 'onnxruntime-react-native';
import * as FileSystem from 'expo-file-system';

export default function App() {
  const [imageUri, setImageUri] = useState(null);
  const [output, setOutput] = useState(null);

  // Load the ONNX model
  const loadModel = async () => {
    try {
      const modelPath =
        Platform.OS === 'android'
          ? `file:///android_asset/face_encoder.onnx` // Android path for assets
          : `${FileSystem.documentDirectory}face_encoder.onnx`; // iOS path
      // Load the ONNX model
      const session = await InferenceSession.create(modelPath);
      return session;
    } catch (error) {
      console.error('Error loading model:', error);
    }
  };
  // Process image and run inference
  const processImage = async (session) => {
    if (!imageUri) return;
    try {
      // Convert image to tensor (mocked for simplicity)
      // In practice, preprocess the image (resize to 224x224, normalize, etc.)
      const inputTensor = new Float32Array(3 * 224 * 224).fill(0.5);
      const tensor = new Tensor('float32', inputTensor, [1, 3, 224, 224]);

      // Run inference
      const feeds = { input: tensor };
      const results = await session.run(feeds);

      // Process results
      setOutput(results.output.data);
    } catch (error) {
      console.error('Error processing image:', error);
    }
  };
  const handleLoadImage = async () => {
    const result = await ImagePicker.launchImageLibraryAsync({
      mediaTypes: ImagePicker.MediaTypeOptions.Images,
      allowsEditing: true,
      aspect: [4, 3],
      quality: 1,
    });
    if (!result.canceled) {
      // Recent expo-image-picker versions return an assets array;
      // result.uri no longer exists on the result object
      setImageUri(result.assets[0].uri);
    }
  };
  const handleRunModel = async () => {
    const session = await loadModel();
    if (session) {
      await processImage(session);
    }
  };

  return (
    <View style={styles.container}>
      <Text>Face Recognition App</Text>
      <Button title="Pick an Image" onPress={handleLoadImage} />
      {imageUri && <Image source={{ uri: imageUri }} style={styles.image} />}
      <Button title="Run Model" onPress={handleRunModel} />
      {output && <Text>Output: {JSON.stringify(output)}</Text>}
    </View>
  );
}
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    backgroundColor: '#fff',
  },
  image: {
    width: 200,
    height: 200,
    marginVertical: 20,
  },
});
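For reference, this is roughly how I plan to replace the mocked tensor once the app runs at all. It's only a sketch on my side, assuming expo-image-manipulator, jpeg-js, and buffer get installed (none of them are in my package.json yet), and imageToTensorData is just a name I made up:
Code: Select all
import * as ImageManipulator from 'expo-image-manipulator';
import jpeg from 'jpeg-js';
import { Buffer } from 'buffer';

// Hypothetical helper: resize the picked image to 224x224 and convert the
// RGBA pixels to a planar (CHW) float32 array scaled to [0, 1].
async function imageToTensorData(uri) {
  const resized = await ImageManipulator.manipulateAsync(
    uri,
    [{ resize: { width: 224, height: 224 } }],
    { base64: true, format: ImageManipulator.SaveFormat.JPEG }
  );
  const { data } = jpeg.decode(Buffer.from(resized.base64, 'base64'), {
    useTArray: true, // decode into a Uint8Array instead of a Node Buffer
  });
  const floats = new Float32Array(3 * 224 * 224);
  for (let i = 0; i < 224 * 224; i++) {
    floats[i] = data[i * 4] / 255;                     // R plane
    floats[224 * 224 + i] = data[i * 4 + 1] / 255;     // G plane
    floats[2 * 224 * 224 + i] = data[i * 4 + 2] / 255; // B plane
  }
  return floats;
}
The resulting array would go into new Tensor('float32', floats, [1, 3, 224, 224]) in place of the .fill(0.5) placeholder.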
Here is the app.json:
Code: Select all
{
  "expo": {
    "name": "FceDetectionApp",
    "slug": "FceDetectionApp",
    "version": "1.0.0",
    "orientation": "portrait",
    "icon": "./assets/images/icon.png",
    "scheme": "myapp",
    "userInterfaceStyle": "automatic",
    "newArchEnabled": true,
    "ios": {
      "supportsTablet": true
    },
    "android": {
      "adaptiveIcon": {
        "foregroundImage": "./assets/images/adaptive-icon.png",
        "backgroundColor": "#ffffff"
      },
      "package": "Face.Files"
    },
    "web": {
      "bundler": "metro",
      "output": "static",
      "favicon": "./assets/images/favicon.png"
    },
    "plugins": [
      "expo-router",
      [
        "expo-splash-screen",
        {
          "image": "./assets/images/splash-icon.png",
          "imageWidth": 200,
          "resizeMode": "contain",
          "backgroundColor": "#ffffff"
        }
      ]
    ],
    "experiments": {
      "typedRoutes": true
    },
    "extra": {
      "router": {
        "origin": false
      },
      "eas": {
        "projectId": "81f38c43-7dd5-4bfc-ad5d-fbb101bd1d3a"
      }
    }
  }
}
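One assumption I'm working under: for the .onnx file to be bundled at all, Metro apparently has to be told to treat it as an asset. This is the metro.config.js I would add (I haven't verified that this is related to the error):
Code: Select all
// metro.config.js — sketch; assumes the default Expo config as a base.
const { getDefaultConfig } = require('expo/metro-config');

const config = getDefaultConfig(__dirname);
// Let Metro bundle .onnx files as assets so they can be require()'d.
config.resolver.assetExts.push('onnx');

module.exports = config;
With that in place, the model could presumably be resolved through expo-asset (Asset.fromModule(require('./assets/face_encoder.onnx')), then downloadAsync(), then passing asset.localUri to InferenceSession.create()) instead of the hard-coded file:///android_asset path.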
package.json dependencies:
Code: Select all
"dependencies": {
"@expo/vector-icons": "^14.0.2",
"@react-navigation/bottom-tabs": "^7.2.0",
"@react-navigation/native": "^7.0.14",
"expo": "~52.0.23",
"expo-blur": "~14.0.1",
"expo-constants": "~17.0.3",
"expo-font": "~13.0.2",
"expo-haptics": "~14.0.0",
"expo-image-picker": "^16.0.3",
"expo-linking": "~7.0.3",
"expo-router": "~4.0.15",
"expo-splash-screen": "~0.29.18",
"expo-status-bar": "~2.0.0",
"expo-symbols": "~0.2.0",
"expo-system-ui": "~4.0.6",
"expo-web-browser": "~14.0.1",
"onnxruntime-react-native": "^1.20.1",
"react": "18.3.1",
"react-dom": "18.3.1",
"react-native": "0.76.5",
"react-native-gesture-handler": "~2.20.2",
"react-native-get-random-values": "~1.11.0",
"react-native-reanimated": "~3.16.1",
"react-native-safe-area-context": "4.12.0",
"react-native-screens": "~4.4.0",
"react-native-web": "~0.19.13",
"react-native-webview": "13.12.5"
},
"devDependencies": {
"@babel/core": "^7.25.2",
"@types/jest": "^29.5.12",
"@types/react": "~18.3.12",
"@types/react-test-renderer": "^18.3.0",
"jest": "^29.2.1",
"jest-expo": "~52.0.2",
"react-test-renderer": "18.3.1",
"typescript": "^5.3.3"
},