Skip to content
7 changes: 7 additions & 0 deletions LICENSE.txt.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions Runtime/Scripts/Video.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

155 changes: 155 additions & 0 deletions Runtime/Scripts/Video/YuvToRgbConverter.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
using System;
using UnityEngine;

namespace LiveKit
{
    // Converts I420 YUV frames to RGBA into an output RenderTexture, via GPU shader or CPU fallback.
    internal sealed class YuvToRgbConverter : IDisposable
    {
        // When true, conversion is attempted with the Hidden/LiveKit/YUV2RGB shader;
        // if that shader is missing from the build, Convert silently falls back to the CPU path.
        public bool UseGpuShader { get; set; } = true;

        // Destination render target. Created/resized lazily by EnsureOutput; owned by this converter.
        public RenderTexture Output { get; private set; }

        private Material _yuvToRgbMaterial;
        private Texture2D _planeY;
        private Texture2D _planeU;
        private Texture2D _planeV;
        private bool _disposed;

        // Ensure Output exists and matches the given size; returns true if created or resized.
        public bool EnsureOutput(int width, int height)
        {
            var changed = false;
            if (Output == null || Output.width != width || Output.height != height)
            {
                if (Output != null)
                {
                    Output.Release();
                    UnityEngine.Object.Destroy(Output);
                }
                Output = new RenderTexture(width, height, 0, RenderTextureFormat.ARGB32);
                Output.Create();
                changed = true;
            }
            return changed;
        }

        // Convert the given buffer to RGBA and write into Output.
        // No-op for null/invalid buffers.
        public void Convert(VideoFrameBuffer buffer)
        {
            if (buffer == null || !buffer.IsValid)
                return;

            int width = (int)buffer.Width;
            int height = (int)buffer.Height;

            EnsureOutput(width, height);

            if (UseGpuShader)
            {
                EnsureGpuMaterial();
                EnsureYuvPlaneTextures(width, height);
                UploadYuvPlanes(buffer);

                if (_yuvToRgbMaterial != null)
                {
                    GpuConvertToRenderTarget();
                    return;
                }
                // fall through to CPU if shader missing
            }

            CpuConvertToRenderTarget(buffer, width, height);
        }

        // Release all Unity resources (RT, material, textures).
        // Idempotent: destroyed objects are nulled out and a second call is a no-op,
        // so we never Destroy an already-destroyed object or leave dangling references.
        public void Dispose()
        {
            if (_disposed) return;
            _disposed = true;

            if (_planeY != null) { UnityEngine.Object.Destroy(_planeY); _planeY = null; }
            if (_planeU != null) { UnityEngine.Object.Destroy(_planeU); _planeU = null; }
            if (_planeV != null) { UnityEngine.Object.Destroy(_planeV); _planeV = null; }
            if (_yuvToRgbMaterial != null) { UnityEngine.Object.Destroy(_yuvToRgbMaterial); _yuvToRgbMaterial = null; }
            if (Output != null)
            {
                Output.Release();
                UnityEngine.Object.Destroy(Output);
                Output = null;
            }
        }

        // Ensure the GPU YUV->RGB material exists. Leaves _yuvToRgbMaterial null when the
        // shader is not present in the build; Convert treats that as "use the CPU path".
        private void EnsureGpuMaterial()
        {
            if (_yuvToRgbMaterial == null)
            {
                var shader = Shader.Find("Hidden/LiveKit/YUV2RGB");
                if (shader != null)
                    _yuvToRgbMaterial = new Material(shader);
            }
        }

        // Ensure or recreate a plane texture with given format and filter settings.
        private static void EnsurePlaneTexture(ref Texture2D tex, int width, int height, TextureFormat format, FilterMode filterMode)
        {
            if (tex == null || tex.width != width || tex.height != height)
            {
                if (tex != null) UnityEngine.Object.Destroy(tex);
                // linear=true: raw YUV samples must not be sRGB-decoded when sampled.
                tex = new Texture2D(width, height, format, false, true);
                tex.filterMode = filterMode;
                tex.wrapMode = TextureWrapMode.Clamp;
            }
        }

        // Ensure Y, U, V plane textures exist with correct dimensions.
        private void EnsureYuvPlaneTextures(int width, int height)
        {
            EnsurePlaneTexture(ref _planeY, width, height, TextureFormat.R8, FilterMode.Bilinear);
            // I420 chroma planes are ceil(width/2) x ceil(height/2). Plain integer
            // division (width / 2) truncates and under-sizes the planes for frames
            // with odd dimensions, so round up instead.
            var chromaW = (width + 1) / 2;
            var chromaH = (height + 1) / 2;
            EnsurePlaneTexture(ref _planeU, chromaW, chromaH, TextureFormat.R8, FilterMode.Bilinear);
            EnsurePlaneTexture(ref _planeV, chromaW, chromaH, TextureFormat.R8, FilterMode.Bilinear);
        }

        // Upload raw Y, U, V plane bytes from buffer to textures.
        // NOTE(review): LoadRawTextureData assumes each plane is tightly packed
        // (stride == plane width). Confirm against the FFI buffer layout; padded
        // strides would need a row-by-row copy instead.
        private void UploadYuvPlanes(VideoFrameBuffer buffer)
        {
            var info = buffer.Info;
            if (info.Components.Count < 3) return;
            var yComp = info.Components[0];
            var uComp = info.Components[1];
            var vComp = info.Components[2];

            _planeY.LoadRawTextureData((IntPtr)yComp.DataPtr, (int)yComp.Size);
            _planeY.Apply(false, false);
            _planeU.LoadRawTextureData((IntPtr)uComp.DataPtr, (int)uComp.Size);
            _planeU.Apply(false, false);
            _planeV.LoadRawTextureData((IntPtr)vComp.DataPtr, (int)vComp.Size);
            _planeV.Apply(false, false);
        }

        // CPU-side conversion to RGBA and blit to the output render target.
        private void CpuConvertToRenderTarget(VideoFrameBuffer buffer, int width, int height)
        {
            var rgba = buffer.ToRGBA();
            var tempTex = new Texture2D(width, height, TextureFormat.RGBA32, false);
            try
            {
                tempTex.LoadRawTextureData((IntPtr)rgba.Info.DataPtr, (int)rgba.GetMemorySize());
                tempTex.Apply();
                Graphics.Blit(tempTex, Output);
            }
            finally
            {
                // Destroy the temp texture and release the native RGBA buffer even if the upload throws.
                UnityEngine.Object.Destroy(tempTex);
                rgba.Dispose();
            }
        }

        // GPU-side YUV->RGB conversion using shader material.
        private void GpuConvertToRenderTarget()
        {
            _yuvToRgbMaterial.SetTexture("_TexY", _planeY);
            _yuvToRgbMaterial.SetTexture("_TexU", _planeU);
            _yuvToRgbMaterial.SetTexture("_TexV", _planeV);
            // The source texture is not read by the shader; the blit just drives the fullscreen pass.
            Graphics.Blit(Texture2D.blackTexture, Output, _yuvToRgbMaterial);
        }
    }
}

2 changes: 2 additions & 0 deletions Runtime/Scripts/Video/YuvToRgbConverter.cs.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

32 changes: 15 additions & 17 deletions Runtime/Scripts/VideoStream.cs
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,14 @@ namespace LiveKit
public class VideoStream
{
public delegate void FrameReceiveDelegate(VideoFrame frame);
public delegate void TextureReceiveDelegate(Texture2D tex2d);
public delegate void TextureReceiveDelegate(Texture tex);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this constitutes a minor breaking API change—are we able to use Texture2D here? If not, we can note this in the release notes.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried but seems to affect performance to convert back to a Texture2D. Let's make a note that this is changed.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

On the app side, the texture is still used the same way w/o any modifications.

public delegate void TextureUploadDelegate();

internal readonly FfiHandle Handle;
private VideoStreamInfo _info;
private bool _disposed = false;
private bool _dirty = false;
private YuvToRgbConverter _converter;

/// Called when we receive a new frame from the VideoTrack
public event FrameReceiveDelegate FrameReceived;
Expand All @@ -29,7 +30,7 @@ public class VideoStream

/// The texture changes every time the video resolution changes.
/// Can be null if UpdateRoutine isn't started
public Texture2D Texture { private set; get; }
public RenderTexture Texture { private set; get; }
public VideoFrameBuffer VideoBuffer { private set; get; }

protected bool _playing = false;
Expand Down Expand Up @@ -70,8 +71,14 @@ private void Dispose(bool disposing)
if (!_disposed)
{
if (disposing)
{
VideoBuffer?.Dispose();
if (Texture != null) UnityEngine.Object.Destroy(Texture);
}
// Unity objects must be destroyed on main thread
_converter?.Dispose();
_converter = null;
// Texture is owned and cleaned up by _converter. Set to null to avoid holding a reference to a disposed RenderTexture.
Texture = null;
_disposed = true;
}
}
Expand Down Expand Up @@ -103,30 +110,21 @@ public IEnumerator Update()
var rWidth = VideoBuffer.Width;
var rHeight = VideoBuffer.Height;

var textureChanged = false;
if (Texture == null || Texture.width != rWidth || Texture.height != rHeight)
{
if (Texture != null) UnityEngine.Object.Destroy(Texture);
Texture = new Texture2D((int)rWidth, (int)rHeight, TextureFormat.RGBA32, false);
Texture.ignoreMipmapLimit = false;
textureChanged = true;
}
var rgba = VideoBuffer.ToRGBA();
{
Texture.LoadRawTextureData((IntPtr)rgba.Info.DataPtr, (int)rgba.GetMemorySize());
}
Texture.Apply();
if (_converter == null) _converter = new YuvToRgbConverter();
var textureChanged = _converter.EnsureOutput((int)rWidth, (int)rHeight);
_converter.Convert(VideoBuffer);
if (textureChanged) Texture = _converter.Output;

if (textureChanged)
TextureReceived?.Invoke(Texture);

TextureUploaded?.Invoke();
rgba.Dispose();
}

yield break;
}

// Handle new video stream events
private void OnVideoStreamEvent(VideoStreamEvent e)
{
if (e.StreamHandle != (ulong)Handle.DangerousGetHandle())
Expand Down
8 changes: 8 additions & 0 deletions Runtime/Shaders.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

70 changes: 70 additions & 0 deletions Runtime/Shaders/YuvToRgb.shader
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
// Fullscreen blit pass that samples three single-channel I420 planes
// (_TexY, _TexU, _TexV) and converts BT.709 limited-range YUV to RGBA.
// Driven by Graphics.Blit from YuvToRgbConverter; the blit source texture is ignored.
Shader "Hidden/LiveKit/YUV2RGB"
{
SubShader
{
Tags { "RenderType" = "Opaque" "Queue" = "Geometry" }
Pass
{
// Blit pass: always write every pixel, no culling, no depth.
ZTest Always Cull Off ZWrite Off

HLSLPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"

// One R8 plane per channel; U and V are half-resolution (I420 chroma subsampling).
sampler2D _TexY;
sampler2D _TexU;
sampler2D _TexV;

struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};

struct v2f
{
float4 pos : SV_POSITION;
half2 uv : TEXCOORD0;
};

// Pass-through vertex shader for the fullscreen quad; UVs forwarded at half precision.
v2f vert(appdata v)
{
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = half2(v.uv);
return o;
}

// BT.709 limited-range (luma 16-235, chroma offset 128) YUV -> RGB, clamped to [0,1].
inline half3 yuvToRgb709Limited(half y, half u, half v)
{
// BT.709 limited range
half c = y - half(16.0 / 255.0);
half d = u - half(128.0 / 255.0);
half e = v - half(128.0 / 255.0);

half Y = half(1.16438356) * c;

half3 rgb;
rgb.r = Y + half(1.79274107) * e;
rgb.g = Y - half(0.21324861) * d - half(0.53290933) * e;
rgb.b = Y + half(2.11240179) * d;
return saturate(rgb);
}

half4 frag(v2f i) : SV_Target
{
// Flip horizontally to match Unity's texture orientation with incoming YUV data
// NOTE(review): an x-flip mirrors the image left-right; Unity texture-origin
// mismatches are usually corrected with a vertical (y) flip — confirm this
// matches the incoming frame layout rather than mirroring the output.
half2 uv = half2(1.0h - i.uv.x, i.uv.y);

half y = tex2D(_TexY, uv).r;
half u = tex2D(_TexU, uv).r;
half v = tex2D(_TexV, uv).r;
// Fully opaque output; alpha is not carried by YUV sources.
return half4(yuvToRgb709Limited(y, u, v), 1.0h);
}
ENDHLSL
}
}
}


9 changes: 9 additions & 0 deletions Runtime/Shaders/YuvToRgb.shader.meta

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading