My goal is to create a Unity AR application (which will later be exported to Android) where the user can see what an IP camera is streaming.
For that, I'm using a mesh renderer (a quad in this case) to display the image, and I added an AR camera to the Unity world pointing at it. I pieced together most of the streaming script below from around the Internet, and I was told I would need the C# script reference LoadRawTextureData() to convert the stream bytes into a texture.
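Before getting to the stream, for reference, this is roughly how a single downloaded image ends up on the quad. It's only a sketch of the setup: the snapshot URL is a placeholder I made up, not a real endpoint of my camera.

using System.Collections;
using UnityEngine;
using UnityEngine.Networking;

public class StillImageTest : MonoBehaviour
{
    IEnumerator Start()
    {
        // Fetch one still JPEG; the download handler decodes it for us.
        UnityWebRequest request = UnityWebRequestTexture.GetTexture("http://192.168.1.104/snapshot.jpg");
        yield return request.SendWebRequest();
        if (request.result == UnityWebRequest.Result.Success)
        {
            // Assign the decoded texture to this object's material (the quad).
            Texture2D tex = DownloadHandlerTexture.GetContent(request);
            GetComponent<Renderer>().material.mainTexture = tex;
        }
    }
}

The full streaming script I'm actually using is this: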
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.Networking;
using System.Collections;
using System.Collections.Generic;
using System;
using System.Net;
using System.IO;

public class IPCamConfig : MonoBehaviour
{
    [HideInInspector]
    public Byte[] JpegData;
    [HideInInspector]
    public string resolution = "320x240";
    private Texture2D camTexture;
    public Stream stream;
    private WebResponse resp;
    public MeshRenderer frame;

    private void Start()
    {
        GetVideo();
        FindLength(stream);
    }

    void OnDestroy()
    {
        StopStream();
    }

    public void GetVideo()
    {
        // Create the HTTP request for the camera stream.
        resolution = "320x240";
        string url = "http://192.168.1.104/videostream.asf?usr=admin&pwd=" + resolution + "&resolution=320*240";
        HttpWebRequest req = (HttpWebRequest)WebRequest.Create(url);
        req.Credentials = new NetworkCredential("username", "password");
        // Get the response.
        resp = req.GetResponse();
        // This is where I call LoadRawTextureData() to turn the stream bytes into a texture.
        camTexture = new Texture2D(4, 4, TextureFormat.ARGB32, false);
        camTexture.LoadRawTextureData(JpegData);
        camTexture.Apply();
        GetComponent<Renderer>().material.mainTexture = camTexture;
        // Get the response stream and start reading frames.
        stream = resp.GetResponseStream();
        frame.material.color = Color.white;
        StartCoroutine(GetFrame());
    }

    public IEnumerator GetFrame()
    {
        while (true)
        {
            // Read the multipart header to learn the size of the next JPEG frame.
            int bytesToRead = FindLength(stream);
            if (bytesToRead == -1)
            {
                // print("End of stream");
                yield break;
            }
            int leftToRead = bytesToRead;
            while (leftToRead > 0)
            {
                // print(leftToRead);
                leftToRead -= stream.Read(JpegData, bytesToRead - leftToRead, leftToRead);
                yield return null;
            }
            // Decode the JPEG bytes and show the frame on the quad.
            MemoryStream ms = new MemoryStream(JpegData, 0, bytesToRead, false, true);
            camTexture.LoadImage(ms.GetBuffer());
            frame.material.mainTexture = camTexture;
            frame.material.color = Color.white;
            stream.ReadByte(); // CR after bytes
            stream.ReadByte(); // LF after bytes
        }
    }

    // Parse the multipart header and return the Content-Length of the next frame.
    int FindLength(Stream stream)
    {
        int b;
        string line = "";
        int result = -1;
        bool atEOL = false;
        while ((b = stream.ReadByte()) != -1)
        {
            if (b == 10) continue; // ignore LF char
            if (b == 13) // CR
            {
                if (atEOL) // two blank lines mean end of header
                {
                    stream.ReadByte(); // eat last LF
                    return result;
                }
                if (line.StartsWith("Content-Length:"))
                {
                    result = Convert.ToInt32(line.Substring("Content-Length:".Length).Trim());
                }
                else
                {
                    line = "";
                }
                atEOL = true;
            }
            else
            {
                atEOL = false;
                line += (char)b;
            }
        }
        return -1;
    }

    public void StopStream()
    {
        stream.Close();
        resp.Close();
    }
}
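For context, the FindLength() function assumes the camera sends a multipart MJPEG stream, where each JPEG frame is preceded by a short text header ending in a blank line, roughly like this (the boundary name and length here are made up and vary by camera):

--boundary
Content-Type: image/jpeg
Content-Length: 15123

...15123 bytes of JPEG data...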
I've placed the LoadRawTextureData() call inside the GetVideo() function, because that's where the IP camera URL is requested and where, as I understand it, the texture should be built from its footage.
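From the docs, my understanding is that LoadRawTextureData() expects uncompressed pixel bytes whose length exactly matches the texture's size and format, whereas LoadImage() decodes JPEG/PNG data and resizes the texture itself. A minimal sketch of that difference (test.jpg is just a stand-in file name):

using System.IO;
using UnityEngine;

public class TextureLoadContrast : MonoBehaviour
{
    void Start()
    {
        // Raw path: the buffer must hold exactly width * height * bytesPerPixel bytes.
        Texture2D raw = new Texture2D(4, 4, TextureFormat.RGBA32, false);
        byte[] rawPixels = new byte[4 * 4 * 4]; // 4x4 pixels, 4 bytes each for RGBA32
        raw.LoadRawTextureData(rawPixels);
        raw.Apply();

        // Encoded path: LoadImage decodes JPEG/PNG bytes and resizes the texture itself.
        Texture2D decoded = new Texture2D(2, 2); // initial size gets overwritten
        byte[] jpegBytes = File.ReadAllBytes("test.jpg"); // stand-in for one streamed frame
        decoded.LoadImage(jpegBytes);
    }
}

If that understanding is right, I'm not sure the raw path can apply to the JPEG bytes coming off the stream at all.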
However, when I run the application in Unity, I constantly get the following error:
UnityEngine.Texture2D:LoadRawTextureData(Byte[])
IPCamConfig:GetVideo() (at Assets/Scripts/IPCamConfig.cs:48)
IPCamConfig:Start() (at Assets/Scripts/IPCamConfig.cs:25)
Is there something else I'm missing? Or do I have to use another script reference/API for byte conversion? Any help is appreciated.