Getting a black screen when streaming video from the Android camera to a web browser via WebRTC


Topic review
   


by Anonymous » 17 Mar 2025, 14:22

I have an Android phone and want to stream its camera video to a web browser. The browser only shows a black screen, even though the video is visible on the phone.
Here is what I have tried so far.

Code: Select all

// Code on the Android phone.
private void handlePeerConnection(SurfaceViewRenderer surfaceViewRenderer) {
    PeerConnectionFactory.InitializationOptions initializationOptions =
            PeerConnectionFactory.InitializationOptions.builder(requireContext().getApplicationContext())
                    .setEnableInternalTracer(true)
                    .setInjectableLogger((message, severity, tag) -> {
                        Log.d("WEBRTC_INJ." + tag, message);
                    }, Logging.Severity.LS_VERBOSE)
                    .createInitializationOptions();
    PeerConnectionFactory.initialize(initializationOptions);

    PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
    PeerConnectionFactory.Builder peerConnectionFactoryBuilder = PeerConnectionFactory.builder()
            .setOptions(options);

    DefaultVideoEncoderFactory defaultVideoEncoderFactory = new DefaultVideoEncoderFactory(
            eglBaseContext, true, true);
    DefaultVideoDecoderFactory defaultVideoDecoderFactory = new DefaultVideoDecoderFactory(
            eglBaseContext);

    peerConnectionFactoryBuilder.setVideoEncoderFactory(defaultVideoEncoderFactory);
    peerConnectionFactoryBuilder.setVideoDecoderFactory(defaultVideoDecoderFactory);

    PeerConnectionFactory peerConnectionFactory = peerConnectionFactoryBuilder.createPeerConnectionFactory();

    PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(new ArrayList<>());
    peerConnection = peerConnectionFactory.createPeerConnection(rtcConfig, new CustomPeerConnectionObserver() {

        @Override
        public void onIceCandidate(IceCandidate iceCandidate) {
            super.onIceCandidate(iceCandidate);
            try {
                Log.d("MY_WEBRTC", "onIceCandidate sending");
                JSONObject object = new JSONObject();
                object.put("type", "webrtc-ice-candidate");
                object.put("label", iceCandidate.sdpMLineIndex);
                object.put("id", iceCandidate.sdpMid);
                object.put("candidate", iceCandidate.sdp);

                // This just sends the JSON data to the web browser.
                user.sendWebSocketMessage(InfoWSSession.path, object.toString());
            } catch (Exception e) {
                Log.e("MY_WEBRTC", "onIceCandidate(). Error: " + e);
            }
        }

        @Override
        public void onRenegotiationNeeded() {
            Log.d("MY_WEBRTC", "onRenegotiationNeeded");
        }
    });

    Data.webRTCIceCandidate.observe(getViewLifecycleOwner(), (v) -> {
        peerConnection.addIceCandidate(v);
    });
    Data.webRTCSessionDescription.observe(getViewLifecycleOwner(), (v) -> {
        peerConnection.setRemoteDescription(new CustomSDPObserver(), v);
    });

    VideoSource videoSource = peerConnectionFactory.createVideoSource(false);
    SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create("CaptureThread", eglBaseContext);
    VideoCapturer videoCapturer = createCameraCapturer(new Camera1Enumerator(false));
    if (videoCapturer == null) {
        Log.d("MYTAG", "videoCapturer is null");
    }
    videoCapturer.initialize(surfaceTextureHelper, requireContext(), videoSource.getCapturerObserver());

    VideoTrack localVideoTrack = peerConnectionFactory.createVideoTrack("101", videoSource);
    localVideoTrack.setEnabled(true);

    peerConnection.addTrack(localVideoTrack);
    localVideoView = surfaceViewRenderer;
    localVideoView.setMirror(false);
    localVideoView.init(eglBaseContext, null);
    localVideoView.setZOrderMediaOverlay(false);
    localVideoTrack.addSink(localVideoView);
    videoCapturer.startCapture(1280, 720, 30);

    MediaConstraints sdpConstraints = new MediaConstraints();
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "0"));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "0"));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxHeight", Integer.toString(720)));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxWidth", Integer.toString(1280)));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("maxFrameRate", Integer.toString(60)));
    sdpConstraints.mandatory.add(new MediaConstraints.KeyValuePair("minFrameRate", Integer.toString(30)));

    peerConnection.createOffer(new CustomSDPObserver() {
        @Override
        public void onCreateSuccess(SessionDescription sessionDescription) {
            super.onCreateSuccess(sessionDescription);
            peerConnection.setLocalDescription(new CustomSDPObserver(), sessionDescription);
            try {
                JSONObject obj = new JSONObject();
                obj.put("type", "webrtc-offer");
                obj.put("sdptype", sessionDescription.type.canonicalForm());
                obj.put("sdp", sessionDescription.description);
                user.sendWebSocketMessage(InfoWSSession.path, obj.toString());
            } catch (Exception e) {
                Log.e("MY_WEBRTC", "createOffer(). Error: " + e);
            }
        }
    }, sdpConstraints);
}
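
For reference, the signaling messages that this Android code pushes over the WebSocket have roughly the following shapes. The TypeScript interface names below are my own labels; only the field names and value types are taken from the JSON objects built above.

Code: Select all

// Phone-to-browser signaling messages, as built in handlePeerConnection() above.
// Interface names are illustrative; the field names come from the JSON code.
interface WebRTCOfferMessage {
    type: "webrtc-offer";
    sdptype: string;   // sessionDescription.type.canonicalForm(), e.g. "offer"
    sdp: string;       // sessionDescription.description (the raw SDP text)
}

interface PhoneIceCandidateMessage {
    type: "webrtc-ice-candidate";
    label: number;     // iceCandidate.sdpMLineIndex
    id: string;        // iceCandidate.sdpMid
    candidate: string; // iceCandidate.sdp
}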

This is the signaling-server part.

Code: Select all

public void onMessage(WebSocket socket, String data) {
    try {
        JSONObject object = new JSONObject(data);
        String type = object.getString("type");
        switch (type) {
            case "webrtc-answer":
                String sdp = object.optString("sdp");
                String sdptypeStr = object.optString("sdptype");
                SessionDescription sessionDescription = new SessionDescription(
                        SessionDescription.Type.fromCanonicalForm(sdptypeStr), sdp);
                Data.webRTCSessionDescription.postValue(sessionDescription);
                break;
            case "webrtc-ice-candidate":
                JSONObject iceCandidateObj = object.optJSONObject("candidate");
                String sdpMid = "";
                int sdpMLineIndex = 0;
                String candidate = "";
                if (iceCandidateObj != null) {
                    sdpMid = iceCandidateObj.optString("sdpMid");
                    sdpMLineIndex = iceCandidateObj.optInt("sdpMLineIndex");
                    candidate = iceCandidateObj.optString("candidate");
                }
                IceCandidate iceCandidate = new IceCandidate(sdpMid, sdpMLineIndex, candidate);
                Data.webRTCIceCandidate.forcePostValue(iceCandidate);
                break;
            case "informing-ready":
                Data.webRTCStartOffer.postValue(null);
                break;
        }
    } catch (JSONException e) {
        Log.e("MY_WEBRTC", "onMessage(). Error: " + e);
    }
}
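
For reference, the browser-to-phone messages that this handler expects have roughly the following shapes. Again, the TypeScript interface names are my own; only the field names come from the parsing code above.

Code: Select all

// Browser-to-phone signaling messages, as parsed in onMessage() above.
// Interface names are illustrative; the field names come from the parser.
interface WebRTCAnswerMessage {
    type: "webrtc-answer";
    sdptype: string;   // passed to SessionDescription.Type.fromCanonicalForm()
    sdp: string;       // the answer SDP text
}

interface BrowserIceCandidateMessage {
    type: "webrtc-ice-candidate";
    candidate: {       // the serialized RTCIceCandidate object from the browser
        sdpMid: string;
        sdpMLineIndex: number;
        candidate: string;
    } | null;          // null once the browser has finished gathering candidates
}

interface InformingReadyMessage {
    type: "informing-ready";  // triggers Data.webRTCStartOffer on the phone
}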

And this is the code that runs in the web browser, written in TypeScript.

Code: Select all

const videoElement = document.getElementById("remote-video") as HTMLVideoElement;
const infoWS = new WebSocket("/info");
let peerConn: RTCPeerConnection;

infoWS.addEventListener("message", function (event) {
    const data = JSON.parse(event.data);
    const type = data.type;
    if (type === "webrtc-offer") {
        const rtcOffer = data;
        handleRTCOfferMsg(rtcOffer);
    } else if (type === "webrtc-ice-candidate") {
        peerConn.addIceCandidate(new RTCIceCandidate({
            candidate: data.candidate,
            sdpMid: data.id,
            sdpMLineIndex: data.label
        }));
    }
});

/* functions */
function createPeerConnection(): RTCPeerConnection {
    peerConn = new RTCPeerConnection();

    peerConn.addEventListener("icecandidate", function (event) {
        const candidate = event.candidate;
        const iceCandidateMessage = {
            type: "webrtc-ice-candidate",
            candidate: candidate
        };
        infoWS.send(JSON.stringify(iceCandidateMessage));
    });

    peerConn.addEventListener("negotiationneeded", async function (e) {
    });

    peerConn.addEventListener("track", function (event) {
        console.log("ontrack is called");
        videoElement.srcObject = event.streams[0];
    });

    return peerConn;
}

async function handleRTCOfferMsg(offer) {
    try {
        const peerConn = createPeerConnection();
        const desc = new RTCSessionDescription({ sdp: offer.sdp, type: offer.sdptype as any });
        await peerConn.setRemoteDescription(desc);
        const answer = await peerConn.createAnswer({ 'mandatory': { 'OfferToReceiveAudio': 0, 'OfferToReceiveVideo': 1 } });
        await peerConn.setLocalDescription(answer);
        const answerMsg = {
            type: "webrtc-answer",
            sdp: peerConn.localDescription?.sdp,
            sdptype: peerConn.localDescription?.type
        };
        infoWS.send(JSON.stringify(answerMsg));
    } catch (e) {
        alert("Error handleRTCOfferMsg(): " + e);
    }
}
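
For context on the "track" handler above: a video element generally will not show frames from an attached MediaStream until playback actually starts, so a typical receiving page either sets autoplay on the element or calls play() on it. A minimal sketch, assuming muted inline playback is acceptable and reusing the "remote-video" element id from the code above:

Code: Select all

// Sketch of a typical way to wire up the receiving <video> element
// (assumption: muted inline playback is acceptable on this page).
const remoteVideo = document.getElementById("remote-video") as HTMLVideoElement;
remoteVideo.muted = true;       // muted playback is exempt from autoplay blocking
remoteVideo.playsInline = true; // keep playback inline on mobile browsers

function attachRemoteStream(stream: MediaStream): void {
    remoteVideo.srcObject = stream;
    // play() returns a promise; a rejection usually means autoplay was blocked.
    remoteVideo.play().catch((err) => console.warn("video.play() rejected:", err));
}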
I noticed this behavior while debugging. Sorry that the code is so long.
