Agora broadcast live streaming: black screen on iOS browsers when starting a stream (agora-rtc-sdk-ng, React web app)


I'm working on a React web app where users can broadcast live streams to their audiences, using agora-rtc-sdk-ng version 4.17.2. When I start a live stream from a computer browser or an Android phone browser, it works correctly as expected, but when I switch to an iOS browser and start a live stream, I get a black screen and the stream never starts, even though I authorize the app to use the camera and microphone. This is the code I'm using for my provider:

import React, {createContext, useContext, useEffect, useMemo, useState} from "react";
import AgoraRTC, { ClientConfig, IAgoraRTCClient } from "agora-rtc-sdk-ng";
import { AGORA_APP_ID } from "./config";

type AppConfiguration = {
  agoraAppId: string,
  agoraEnable: boolean
};

type AgoraRTCProviderState = {
  client: IAgoraRTCClient | null,
  appConfiguration: AppConfiguration | null,
  config: ClientConfig | null
};

const AgoraContext = createContext<AgoraRTCProviderState>({ client: null, appConfiguration: null, config: null });

type props = {
  children: React.ReactNode,
  config: ClientConfig
};

export const AgoraProvider: React.FC<props> = ({ config, children}) => {
  const [client, setClient] = useState<IAgoraRTCClient | null>(null);
  const [appConfiguration, setAppConfiguration] = useState<AppConfiguration | null>(null);

  const onbeforeunload = () => {
    if (client) {
      client.removeAllListeners();
    }
  };

  useEffect(() => {
    if (AGORA_APP_ID)
      setAppConfiguration({
        agoraAppId: AGORA_APP_ID,
        agoraEnable: true
      });

    window.addEventListener("beforeunload", onbeforeunload);

    const _client = AgoraRTC.createClient(config);
    AgoraRTC.setLogLevel(0);
    if (_client)
      setClient(_client);

    return () => {
      window.removeEventListener("beforeunload", onbeforeunload);
    }
  }, [])

  const value = useMemo(
    () => ({ client, appConfiguration, config}),
    [client, appConfiguration, config]
  );

  return React.createElement(AgoraContext.Provider, { value }, children);
};

AgoraProvider.displayName = 'AgoraProvider';

export const useAgora = () => useContext(AgoraContext);

export default AgoraProvider;

This is how the provider is used:

...
<AgoraProvider config={{ mode: "live", codec: "h264", role: "host" }}>
      <Layout>
          <LivePubCom onStatusChanged={streamingStatusChanged} handleDuration={handleDuration} ref={publisherRef} />

......
      </Layout>
    </AgoraProvider>

This is the code of the LivePubComp:

const LivePublisherComponent = React.forwardRef(({ onStatusChanged, handleDuration }: props, ref: React.Ref<any>) => {
  const { client } = useAgora();
  const live = useSelector((state: StoreState) => state.lives.item);
  const token = useSelector((state: StoreState) => state.lives.token);
  const localTracks = useRef<LocalTracks>({ videoTrack: null, audioTrack: null });
  const clientRef = useRef<any>();
  let streamDurationIntervalRef = useRef<any>();
  const [tracks, setTracks] = useState<IAgoraTrack | null>(null);
  const [isStarted, setIsStarted] = useState<boolean>(false);
  const [errorMessage, setErrorMessage] = useState<string | null>(null);

  const publish = async () => {
    if (!client || !live || !AGORA_APP_ID || !live?.sessionId || !live.creator) return;

    await client.join(AGORA_APP_ID, live.sessionId, token, live.creator._id);

    const [microphoneTrack, cameraTrack] = await agoraUtil.createLocalTracks({}, { encoderConfig: { bitrateMax: 1000 } });
     
    await client.publish([microphoneTrack, cameraTrack]);

    setTracks({ tracks: [microphoneTrack, cameraTrack] });
    localTracks.current = { videoTrack: cameraTrack, audioTrack: microphoneTrack };

    onStatusChanged(true);
    setIsStarted(true);
    streamDurationIntervalRef.current && clearInterval(streamDurationIntervalRef.current);
    streamDurationIntervalRef.current = setInterval(() => {
      handleDuration(clientRef.current.getRTCStats().Duration);
    }, 1000);
  };
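
For reference, here is a minimal sketch of what agoraUtil.createLocalTracks wraps (assuming it simply forwards to AgoraRTC.createMicrophoneAndCameraTracks, which matches how it is called above), together with how the local camera preview is typically rendered with this SDK (the "local-player" element id is illustrative, not the real one from my app):

import AgoraRTC, {
  IMicrophoneAudioTrack,
  ICameraVideoTrack,
  MicrophoneAudioTrackInitConfig,
  CameraVideoTrackInitConfig
} from "agora-rtc-sdk-ng";

// Assumed thin wrapper around the SDK call; the real helper may do more
// (device checks, error handling).
export const createLocalTracks = (
  audioConfig: MicrophoneAudioTrackInitConfig,
  videoConfig: CameraVideoTrackInitConfig
): Promise<[IMicrophoneAudioTrack, ICameraVideoTrack]> =>
  AgoraRTC.createMicrophoneAndCameraTracks(audioConfig, videoConfig);

// Local preview: the SDK plays the camera track into a container element by id.
// "local-player" is an illustrative id for a div rendered by the component.
export const showLocalPreview = (cameraTrack: ICameraVideoTrack) => {
  cameraTrack.play("local-player", { fit: "cover" });
};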

Has anyone faced the same issue? I read the docs for supported browsers, and mine is supported.


There are 0 answers