Below is the code for using the device's secondary (front-facing) camera as a custom video source in Unity on Android with Agora.io.
See the Update function, which pushes each image frame from the custom camera (the selected WebCamTexture) to the Agora engine.
using Agora.Rtc;
using Cysharp.Threading.Tasks;
using System;
using System.Collections.Generic;
using UnityEngine;
public class VideoCallTest : MonoBehaviour
{
    // Fill in your app ID.
    private string _appID = "";
    // Fill in your channel name.
    private string _channelName = "testChannel";
    // Fill in the temporary token you obtained from Agora Console.
    private string _token = "";
    [SerializeField] VideoSurface LocalView;
    //internal VideoSurface RemoteView;
    internal IRtcEngine RtcEngine;
    //------------
    private Texture2D _texture2D;
    //private Rect _rect;
    private WebCamTexture _webCameraTexture;
    //public RawImage RawImage;
    public Vector2 CameraSize = new Vector2(480, 480);
    public int CameraFPS = 15;
    public ChannelMediaOptions options = new ChannelMediaOptions();
    //-------------
    private void Start()
    {
        // My own class for handling the channels.
        VideoCallManager.VideoChannelId channelId = new VideoCallManager.VideoChannelId(
            new ChannelManager.ChannelId { Name = _channelName, Users = new List<int>() { 100, 103 } });
        UniTask.Void(async () =>
        {
            // My own method to get the token for the channel.
            await channelId.RefreshToken();
            _token = channelId.Token;
            SetupVideoSDKEngine();
            InitEventHandler();
            //--------------
            var ret = RtcEngine.SetExternalVideoSource(true, false,
                EXTERNAL_VIDEO_SOURCE_TYPE.VIDEO_FRAME, new SenderOptions());
            Debug.Log("SetExternalVideoSource returns: " + ret);
            WebCamDevice[] devices = WebCamTexture.devices;
            _webCameraTexture = new WebCamTexture(devices[0].name, (int)CameraSize.x,
                (int)CameraSize.y, CameraFPS);
            _webCameraTexture.Play();
            Debug.Log("devices[0].name: " + devices[0].name);
            //-------------
            Join();
        });
    }
    private void SetupVideoSDKEngine()
    {
        // Create an instance of the video SDK.
        RtcEngine = Agora.Rtc.RtcEngine.CreateAgoraRtcEngine();
        // Specify the context configuration to initialize the created instance.
        RtcEngineContext context = new RtcEngineContext(_appID, 0,
            CHANNEL_PROFILE_TYPE.CHANNEL_PROFILE_COMMUNICATION,
            AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_DEFAULT, AREA_CODE.AREA_CODE_GLOB, null);
        // Initialize the instance.
        RtcEngine.Initialize(context);
    }
    private void InitEventHandler()
    {
        // Creates a UserEventHandler instance.
        UserEventHandler handler = new UserEventHandler(this);
        RtcEngine.InitEventHandler(handler);
    }
    internal class UserEventHandler : IRtcEngineEventHandler
    {
        private readonly VideoCallTest _videoSample;
        internal UserEventHandler(VideoCallTest videoSample)
        {
            _videoSample = videoSample;
        }
        // This callback is triggered when the local user joins the channel.
        public override void OnJoinChannelSuccess(RtcConnection connection, int elapsed)
        {
            Debug.Log("You joined channel: " + connection.channelId);
        }
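        // Sketch (not in the original sample): where a remote user's video could be
        // bound to the RemoteView surface that is commented out above. OnUserJoined
        // is a standard IRtcEngineEventHandler callback in the v4 Unity SDK.
        public override void OnUserJoined(RtcConnection connection, uint remoteUid, int elapsed)
        {
            Debug.Log("Remote user joined: " + remoteUid);
            //_videoSample.RemoteView.SetForUser(remoteUid, connection.channelId,
            //    VIDEO_SOURCE_TYPE.VIDEO_SOURCE_REMOTE);
            //_videoSample.RemoteView.SetEnable(true);
        }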
    }
    public void Join()
    {
        // Enable the video module.
        RtcEngine.EnableVideo();
        // Set the user role as broadcaster.
        RtcEngine.SetClientRole(CLIENT_ROLE_TYPE.CLIENT_ROLE_BROADCASTER);
        // Set the local video view.
        //LocalView.SetForUser(0, "", VIDEO_SOURCE_TYPE.VIDEO_SOURCE_CAMERA);
        //-------------
        LocalView.SetForUser(0, "", VIDEO_SOURCE_TYPE.VIDEO_SOURCE_CUSTOM);
        //-------------
        // Start rendering local video.
        LocalView.SetEnable(true);
        // Join a channel.
        RtcEngine.JoinChannel(_token, _channelName);
        //------------------
        options.publishCameraTrack.SetValue(false);      // Disable publishing the camera track.
        options.publishCustomVideoTrack.SetValue(true);  // Enable publishing the custom video track.
        // Update the channel media options to publish the custom video track.
        RtcEngine.UpdateChannelMediaOptions(options);
        //------------------
    }
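    // Sketch (not in the original sample), assuming the v4 JoinChannel overload that
    // accepts ChannelMediaOptions: it sets the publishing flags at join time instead
    // of updating them right after joining, which avoids briefly publishing the
    // default camera track.
    private void JoinWithOptions()
    {
        RtcEngine.EnableVideo();
        options.clientRoleType.SetValue(CLIENT_ROLE_TYPE.CLIENT_ROLE_BROADCASTER);
        options.publishCameraTrack.SetValue(false);      // Do not publish the SDK camera track.
        options.publishCustomVideoTrack.SetValue(true);  // Publish the external (custom) track.
        LocalView.SetForUser(0, "", VIDEO_SOURCE_TYPE.VIDEO_SOURCE_CUSTOM);
        LocalView.SetEnable(true);
        RtcEngine.JoinChannel(_token, _channelName, 0, options);
    }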
    private void Update()
    {
        if (_webCameraTexture == null || !_webCameraTexture.isPlaying) return;
        if (_texture2D == null)
            _texture2D = new Texture2D(_webCameraTexture.width, _webCameraTexture.height,
                TextureFormat.RGBA32, false); // No mip chain, so GetRawTextureData returns exactly one RGBA frame.
        Color[] pixels = _webCameraTexture.GetPixels();
        _texture2D.SetPixels(pixels);
        _texture2D.Apply();
        int width = _texture2D.width;
        int height = _texture2D.height;
        // Convert the Texture2D to a byte array.
        byte[] textureBytes = _texture2D.GetRawTextureData();
        // Create an external video frame.
        ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
        externalVideoFrame.type = VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        externalVideoFrame.format = VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_RGBA;
        externalVideoFrame.buffer = textureBytes;
        externalVideoFrame.stride = width;
        externalVideoFrame.height = height;
        externalVideoFrame.timestamp = DateTime.Now.Ticks / 10000; // Convert 100 ns ticks to ms.
        RtcEngine.PushVideoFrame(externalVideoFrame);
    }
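    // Sketch (not in the original sample): a lower-allocation variant of the copy in
    // Update above. Color32 avoids the per-pixel float Color conversion, and the
    // buffer is reused across frames since WebCamTexture.GetPixels32 can fill a
    // caller-supplied array. Call this from Update instead of the inline code if
    // per-frame GC pressure matters.
    private Color32[] _pixelBuffer;
    private void PushFrameLowAlloc()
    {
        if (_webCameraTexture == null || !_webCameraTexture.isPlaying) return;
        if (_texture2D == null)
            _texture2D = new Texture2D(_webCameraTexture.width, _webCameraTexture.height,
                TextureFormat.RGBA32, false);
        _pixelBuffer = _webCameraTexture.GetPixels32(_pixelBuffer); // Allocates once, then reuses.
        _texture2D.SetPixels32(_pixelBuffer);
        _texture2D.Apply();
        ExternalVideoFrame frame = new ExternalVideoFrame();
        frame.type = VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
        frame.format = VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_RGBA;
        frame.buffer = _texture2D.GetRawTextureData();
        frame.stride = _texture2D.width;
        frame.height = _texture2D.height;
        frame.timestamp = DateTime.Now.Ticks / 10000; // 100 ns ticks to ms.
        RtcEngine.PushVideoFrame(frame);
    }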
    public void Leave()
    {
        // Leave the channel.
        RtcEngine.LeaveChannel();
        // Disable the video module.
        RtcEngine.DisableVideo();
        // Stop rendering the remote video.
        //RemoteView.SetEnable(false);
        // Stop rendering the local video.
        LocalView.SetEnable(false);
    }
    void OnApplicationQuit()
    {
        if (RtcEngine != null)
        {
            Leave();
            RtcEngine.Dispose();
            RtcEngine = null;
        }
    }
}
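Note that the sample indexes WebCamTexture.devices[0], which on many Android devices is the rear camera. If you specifically need the front-facing camera, a small helper like the one below (a sketch using Unity's WebCamDevice.isFrontFacing flag; the method name is my own) can select it, falling back to the first device:
private static string GetFrontCameraName()
{
    WebCamDevice[] devices = WebCamTexture.devices;
    foreach (WebCamDevice device in devices)
    {
        if (device.isFrontFacing)
            return device.name;
    }
    return devices.Length > 0 ? devices[0].name : null;
}
Pass the returned name to the WebCamTexture constructor in Start in place of devices[0].name.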