Unity: Live Video Streaming

I'm trying to stream live video from one app to another. Currently I have 2 apps, where app 1 is the server / sender and app 2 is the client / receiver. In app 1 I successfully send the video bytes to the client, and on the client side I'm also receiving all of the bytes. I'm using sockets and TCP. The issue I'm facing is that when I receive the video bytes and assign them to a RawImage texture, the image on the texture looks zoomed in too much and it's very pixelated.

Updated image:

[screenshot]

This is what I stream:

[screenshot]

And this is what I get on the client (what I see on the RawImage when I receive the bytes):

[screenshot]

That is the first issue. However, I'm currently testing from one desktop to another; my goal is to stream from an iPad to a desktop, and when I do that it's so slow that it kills the app on both the iPad and the desktop.

Some troubleshooting I have tried so far:

1: I think this is happening because I have 2 different resolutions, since I stream from the iPad to a desktop.

2: The texture image is too large. I output its size and it returns 630. I tried to resize it using Unity's Texture2D.Resize, but I get a gray texture because the function leaves the pixels undefined.

3: I used other libraries for resizing textures and I do get what I want, but after 12 frames the RawImage starts flickering between the video and a "?" texture, then it freezes on both apps (iPad and desktop).

4: I believe the way I'm reading the texture is causing the issue, because I use both the SetPixels and GetPixels functions and they are heavy.

My Code: Server / Sender Side:

using UnityEngine;
using System.Collections;
using System.IO;
using UnityEngine.UI;
using System;
using System.Text;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Collections.Generic;

public class Connecting : MonoBehaviour
{
WebCamTexture webCam;
public RawImage myImage;
Texture2D currentTexture;

private TcpListener listner;
private const int port = 8010;
private bool stop = false;

private List<TcpClient> clients = new List<TcpClient>();

private void Start()
{
    // Open the Camera on the desired device, in my case IPAD pro
    webCam = new WebCamTexture();
    // Get all devices , front and back camera
    webCam.deviceName = WebCamTexture.devices[WebCamTexture.devices.Length - 1].name;

    // request the lowest width and height possible
    webCam.requestedHeight = 10;
    webCam.requestedWidth = 10;


    webCam.Play();

    // Create a Texture2D matching the webcam dimensions
    currentTexture = new Texture2D(webCam.width, webCam.height);

    // Connect to the server
    listner = new TcpListener(port);

    listner.Start();

    // Create a separate thread for handling requests from the client
    Loom.RunAsync(() => {

        while (!stop)
        {
            // Wait for client approval
            var client = listner.AcceptTcpClient();
            // We are connected
            clients.Add(client);


            Loom.RunAsync(() =>
            {
                while (!stop)
                {

                    var stremReader = client.GetStream();

                    if (stremReader.CanRead)
                    {
                        // we need storage for data
                        using (var messageData = new MemoryStream())
                        {
                            Byte[] buffer = new Byte[client.ReceiveBufferSize];


                            while (stremReader.DataAvailable)
                            {
                                int bytesRead = stremReader.Read(buffer, 0, buffer.Length);

                                if (bytesRead == 0)
                                    break;

                                // Writes to the data storage
                                messageData.Write(buffer, 0, bytesRead);

                            }

                            if (messageData.Length > 0)
                            {
                                // send pngImage
                                SendPng(client);

                            }

                        }
                    }
                }
            });
        }

    });



}

private void Update()
{
    myImage.texture = webCam;
}


// Read video pixels and send them to the client
private void SendPng (TcpClient client)
{
    Loom.QueueOnMainThread(() =>
    {
        // Get the webcam texture pixels
        currentTexture.SetPixels(webCam.GetPixels());
        var pngBytes = currentTexture.EncodeToPNG();


        // Want to Write 
        var stream = client.GetStream();

        // Write the image bytes
        stream.Write(pngBytes, 0, pngBytes.Length);

        // send it 
        stream.Flush();

    });
}

// stop everything
private void OnApplicationQuit()
{
    webCam.Stop();
    stop = true;
    listner.Stop();

    foreach (TcpClient c in clients)
        c.Close();
}



}

Client / Receiver Side:

using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using System.Net.Sockets; 
using System.Net;
using System.IO;

public class reciver : MonoBehaviour
{

public RawImage image;

const int port = 8010;

public string IP = "";

TcpClient client;


Texture2D tex;

// Use this for initialization
void Start()
{

    client = new TcpClient();

    // connect to server

    Loom.RunAsync(() => {
        Debug.LogWarning("Connecting to server...");
        // if on desktop
        client.Connect(IPAddress.Loopback, port);

        // if using the IPAD
        //client.Connect(IPAddress.Parse(IP), port);
        Debug.LogWarning("Connected!");




    });

}

float lastTimeRequestedTex = 0;
// Update is called once per frame
void Update()
{

    //if (Time.time - lastTimeRequestedTex < 0.1f)
    //  return;

    lastTimeRequestedTex = Time.time;


    if (!client.Connected)
        return;


    // Send 1 byte to server 
    var serverStream = client.GetStream();

    // request the texture from the server 
    if (serverStream.CanWrite)
    {   
        // Texture request
        // send request
        serverStream.WriteByte(byte.MaxValue);
        serverStream.Flush();
        Debug.Log("Succesfully send 1 byte");
    }


    if (serverStream.CanRead)
    {

        // Read the bytes 
        using (var writer = new MemoryStream())
        {
            var readBuffer = new byte[client.ReceiveBufferSize];


            while (serverStream.DataAvailable)
            {

                int numberOfBytesRead = serverStream.Read(readBuffer, 0, readBuffer.Length);
                if (numberOfBytesRead <= 0)
                {
                    break;
                }

                writer.Write(readBuffer, 0, numberOfBytesRead);


            }

            if (writer.Length > 0)
            {
                // got whole data in writer
                // Get the bytes and apply them to the texture
                var tex = new Texture2D(0, 0);
                tex.LoadImage(writer.ToArray());
                Debug.Log(tex.width + tex.height);
                image.texture = tex;


            }   
        }
    }
}

void OnApplicationQuit()
{
    Debug.LogWarning("OnApplicationQuit");
    client.Close();
}
}

Solution 1:

I ran your code and it worked sometimes and failed sometimes (about 90% of the time). It ran at about 5 FPS on my computer. This will not play well on a mobile device, and I am sure you are targeting the iPad.

There are a few problems in your code, but they are very serious problems.


1. Your image is not completely received before you load it.

This is why your image looks so weird.

The biggest mistake people make when working with sockets is to assume that everything you send will be received at once. This is not true, yet that's how your client is coded. Please read this.

This is the method I used in my answer:

A. Get the Texture2D byte array.

B. Send the byte array length. Not the byte array itself, but its length.

C. The client will read that length first.

D. The client will use that length to read the whole texture data/pixels until completion.

E. Convert the received bytes to an array.

You can look at the private int readImageByteSize(int size) and the private void readFrameByteArray(int size) functions for how to read all the bytes.
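
The core of both of those functions is the same read-until-complete loop. Here is a condensed sketch of it as a standalone helper (ReadExactly is just an illustrative name, not part of the code below):

    // Keep calling Read until exactly 'size' bytes have arrived.
    // A single Read may return fewer bytes than requested.
    private byte[] ReadExactly(NetworkStream serverStream, int size)
    {
        byte[] buffer = new byte[size];
        int total = 0;
        do
        {
            int read = serverStream.Read(buffer, total, size - total);
            if (read == 0)
                return null;    // 0 bytes read means the other side disconnected
            total += read;
        } while (total != size);

        return buffer;
    }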

Of course, you must also know the length of the length header that is sent first. The length is stored in an int data type.

The max int value is 2,147,483,647, which is 10 digits long. So, as a protocol, I made the length array that is sent first 15 bytes long. This is a rule that must be obeyed on the client side too.

This is how it works now:

Read the byte array from the Texture2D, get the length of that array, and send it to the client. The client follows the rule that the first 15 bytes are simply the length. The client then reads those 15 bytes, converts them back into a length, and uses that length in a loop to read the complete Texture2D data from the server.

The length conversion is done with the void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes) and int frameByteArrayToByteLength(byte[] frameBytesLength) functions. Take a look at those to understand them.
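
Conceptually, those two helpers are just a BitConverter round trip padded into a fixed 15-byte header. A condensed sketch (pngBytes stands for the encoded image byte array from the server code below):

    // Sender side: convert the int length to bytes and pad it into the 15-byte header
    const int SEND_RECEIVE_COUNT = 15;
    byte[] frameBytesLength = new byte[SEND_RECEIVE_COUNT];
    BitConverter.GetBytes(pngBytes.Length).CopyTo(frameBytesLength, 0);   // byteLengthToFrameByteArray

    // Receiver side: the first 4 of those 15 bytes hold the int length
    int imageSize = BitConverter.ToInt32(frameBytesLength, 0);            // frameByteArrayToByteLength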


2. Performing socket operations on the main Thread.

This is why the FPS is 5 on my computer.

Don't do this, as it will keep your frame rate low, just like it already is. I have answered many questions like this but won't go deep into it here, because it looks like you know what you are doing: you tried to use a Thread, but did it wrong.

A. You were reading on the main Thread when you called serverStream.Read(readBuffer, 0, readBuffer.Length); in the Update function.

You should have done that inside

Loom.RunAsync(() =>
{ //your read code });

B. You made the same mistake in the SendPng function when you sent data with stream.Write(pngBytes, 0, pngBytes.Length); inside

Loom.QueueOnMainThread(() =>
{});

Anything you do inside Loom.QueueOnMainThread will be done on the main Thread.

You are supposed to do the sending in another Thread: Loom.RunAsync(() => {});
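
As a rough sketch of that pattern, using the Loom helper you already have (ReceiveOneFrame is a hypothetical placeholder for the full receive logic shown in the client code below):

    // Sketch: blocking socket work runs on a background thread,
    // Unity API calls (Texture2D, RawImage) go back to the main thread.
    Loom.RunAsync(() =>
    {
        byte[] pngBytes = ReceiveOneFrame();   // hypothetical blocking receive helper

        Loom.QueueOnMainThread(() =>
        {
            tex.LoadImage(pngBytes);           // Texture2D/UI work must stay on the main thread
            image.texture = tex;
        });
    });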


Finally, listner = new TcpListener(port); is obsolete. It did not cause any problem, but use listner = new TcpListener(IPAddress.Any, port); in your server code instead, which listens on any IP.

The final FPS is over 50 on my computer after making all these fixes. The code below can be improved a lot; I will leave that for you to do.

You can use an online code compare tool to see what changed in each class.

SERVER:

using UnityEngine;
using System.Collections;
using System.IO;
using UnityEngine.UI;
using System;
using System.Text;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Collections.Generic;

public class Connecting : MonoBehaviour
{
    WebCamTexture webCam;
    public RawImage myImage;
    public bool enableLog = false;

    Texture2D currentTexture;

    private TcpListener listner;
    private const int port = 8010;
    private bool stop = false;

    private List<TcpClient> clients = new List<TcpClient>();

    //This must be the same as SEND_RECEIVE_COUNT on the client
    const int SEND_RECEIVE_COUNT = 15;

    private void Start()
    {
        Application.runInBackground = true;

        //Start WebCam coroutine
        StartCoroutine(initAndWaitForWebCamTexture());
    }


    //Converts the data size to byte array and put result to the fullBytes array
    void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
    {
        //Clear old data
        Array.Clear(fullBytes, 0, fullBytes.Length);
        //Convert int to bytes
        byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
        //Copy result to fullBytes
        bytesToSendCount.CopyTo(fullBytes, 0);
    }

    //Converts the byte array to the data size and returns the result
    int frameByteArrayToByteLength(byte[] frameBytesLength)
    {
        int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
        return byteLength;
    }

    IEnumerator initAndWaitForWebCamTexture()
    {
        // Open the Camera on the desired device, in my case IPAD pro
        webCam = new WebCamTexture();
        // Get all devices , front and back camera
        webCam.deviceName = WebCamTexture.devices[WebCamTexture.devices.Length - 1].name;

        // request the lowest width and height possible
        webCam.requestedHeight = 10;
        webCam.requestedWidth = 10;

        myImage.texture = webCam;

        webCam.Play();

        currentTexture = new Texture2D(webCam.width, webCam.height);

        // Create the listener that clients will connect to
        listner = new TcpListener(IPAddress.Any, port);

        listner.Start();

        while (webCam.width < 100)
        {
            yield return null;
        }

        //Start sending coroutine
        StartCoroutine(senderCOR());
    }

    WaitForEndOfFrame endOfFrame = new WaitForEndOfFrame();
    IEnumerator senderCOR()
    {

        bool isConnected = false;
        TcpClient client = null;
        NetworkStream stream = null;

        // Wait for client to connect in another Thread 
        Loom.RunAsync(() =>
        {
            while (!stop)
            {
                // Wait for client connection
                client = listner.AcceptTcpClient();
                // We are connected
                clients.Add(client);

                isConnected = true;
                stream = client.GetStream();
            }
        });

        //Wait until client has connected
        while (!isConnected)
        {
            yield return null;
        }

        LOG("Connected!");

        bool readyToGetFrame = true;

        byte[] frameBytesLength = new byte[SEND_RECEIVE_COUNT];

        while (!stop)
        {
            //Wait for End of frame
            yield return endOfFrame;

            currentTexture.SetPixels(webCam.GetPixels());
            byte[] pngBytes = currentTexture.EncodeToPNG();
            //Fill total byte length to send. Result is stored in frameBytesLength
            byteLengthToFrameByteArray(pngBytes.Length, frameBytesLength);

            //Set readyToGetFrame false
            readyToGetFrame = false;

            Loom.RunAsync(() =>
            {
                //Send total byte count first
                stream.Write(frameBytesLength, 0, frameBytesLength.Length);
                LOG("Sent Image byte Length: " + frameBytesLength.Length);

                //Send the image bytes
                stream.Write(pngBytes, 0, pngBytes.Length);
                LOG("Sending Image byte array data : " + pngBytes.Length);

                //Sent. Set readyToGetFrame true
                readyToGetFrame = true;
            });

            //Wait until we are ready to get new frame(Until we are done sending data)
            while (!readyToGetFrame)
            {
                LOG("Waiting To get new frame");
                yield return null;
            }
        }
    }


    void LOG(string messsage)
    {
        if (enableLog)
            Debug.Log(messsage);
    }

    private void Update()
    {
        myImage.texture = webCam;
    }

    // stop everything
    private void OnApplicationQuit()
    {
        if (webCam != null && webCam.isPlaying)
        {
            webCam.Stop();
            stop = true;
        }

        if (listner != null)
        {
            listner.Stop();
        }

        foreach (TcpClient c in clients)
            c.Close();
    }
}

CLIENT:

using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using System.Net.Sockets;
using System.Net;
using System.IO;
using System;

public class reciver : MonoBehaviour
{
    public RawImage image;
    public bool enableLog = false;

    const int port = 8010;
    public string IP = "192.168.1.165";
    TcpClient client;

    Texture2D tex;

    private bool stop = false;

    //This must be the same as SEND_RECEIVE_COUNT on the server
    const int SEND_RECEIVE_COUNT = 15;

    // Use this for initialization
    void Start()
    {
        Application.runInBackground = true;

        tex = new Texture2D(0, 0);
        client = new TcpClient();

        //Connect to server from another Thread
        Loom.RunAsync(() =>
        {
            LOGWARNING("Connecting to server...");
            // if on desktop
            client.Connect(IPAddress.Loopback, port);

            // if using the IPAD
            //client.Connect(IPAddress.Parse(IP), port);
            LOGWARNING("Connected!");

            imageReceiver();
        });
    }


    void imageReceiver()
    {
        //While loop in another Thread is fine so we don't block main Unity Thread
        Loom.RunAsync(() =>
        {
            while (!stop)
            {
                //Read Image Count
                int imageSize = readImageByteSize(SEND_RECEIVE_COUNT);
                LOGWARNING("Received Image byte Length: " + imageSize);

                //Read Image Bytes and Display it
                readFrameByteArray(imageSize);
            }
        });
    }


    //Converts the data size to byte array and put result to the fullBytes array
    void byteLengthToFrameByteArray(int byteLength, byte[] fullBytes)
    {
        //Clear old data
        Array.Clear(fullBytes, 0, fullBytes.Length);
        //Convert int to bytes
        byte[] bytesToSendCount = BitConverter.GetBytes(byteLength);
        //Copy result to fullBytes
        bytesToSendCount.CopyTo(fullBytes, 0);
    }

    //Converts the byte array to the data size and returns the result
    int frameByteArrayToByteLength(byte[] frameBytesLength)
    {
        int byteLength = BitConverter.ToInt32(frameBytesLength, 0);
        return byteLength;
    }


    /////////////////////////////////////////////////////Read Image SIZE from Server///////////////////////////////////////////////////
    private int readImageByteSize(int size)
    {
        bool disconnected = false;

        NetworkStream serverStream = client.GetStream();
        byte[] imageBytesCount = new byte[size];
        var total = 0;
        do
        {
            var read = serverStream.Read(imageBytesCount, total, size - total);
            //Debug.LogFormat("Client recieved {0} bytes", total);
            if (read == 0)
            {
                disconnected = true;
                break;
            }
            total += read;
        } while (total != size);

        int byteLength;

        if (disconnected)
        {
            byteLength = -1;
        }
        else
        {
            byteLength = frameByteArrayToByteLength(imageBytesCount);
        }
        return byteLength;
    }

    /////////////////////////////////////////////////////Read Image Data Byte Array from Server///////////////////////////////////////////////////
    private void readFrameByteArray(int size)
    {
        bool disconnected = false;

        NetworkStream serverStream = client.GetStream();
        byte[] imageBytes = new byte[size];
        var total = 0;
        do
        {
            var read = serverStream.Read(imageBytes, total, size - total);
            //Debug.LogFormat("Client recieved {0} bytes", total);
            if (read == 0)
            {
                disconnected = true;
                break;
            }
            total += read;
        } while (total != size);

        bool readyToReadAgain = false;

        //Display Image
        if (!disconnected)
        {
            //Display Image on the main Thread
            Loom.QueueOnMainThread(() =>
            {
                displayReceivedImage(imageBytes);
                readyToReadAgain = true;
            });
        }

        //Wait until old Image is displayed
        while (!readyToReadAgain)
        {
            System.Threading.Thread.Sleep(1);
        }
    }


    void displayReceivedImage(byte[] receivedImageBytes)
    {
        tex.LoadImage(receivedImageBytes);
        image.texture = tex;
    }


    // Update is called once per frame
    void Update()
    {


    }


    void LOG(string messsage)
    {
        if (enableLog)
            Debug.Log(messsage);
    }

    void LOGWARNING(string messsage)
    {
        if (enableLog)
            Debug.LogWarning(messsage);
    }

    void OnApplicationQuit()
    {
        LOGWARNING("OnApplicationQuit");
        stop = true;

        if (client != null)
        {
            client.Close();
        }
    }
}