I’m trying to follow the Point Cloud tutorial using an Astra Pro, activated with a free license, on Unity 2019.1.0f2. The Astra package works in the same Unity version, so the hardware itself is fine. The Nuitrack documentation is not good.
The symptom is that the RGB camera does not seem to be streaming (the color texture never updates).
Here is the script.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Builds a depth texture and an RGB texture from the Nuitrack depth/color streams
/// and assigns them to the given materials (Point Cloud tutorial).
/// Note: class name kept lowercase to match the existing script/file name Unity expects.
/// </summary>
public class myCloud : MonoBehaviour
{
    [SerializeField] Material depthMat = null, colorMat = null; // materials for depth and color output
    nuitrack.DepthFrame depthFrame = null;   // last seen depth frame; compared by reference to detect a new frame
    nuitrack.ColorFrame colorFrame = null;   // last seen color frame
    [SerializeField] int hRes;               // requested horizontal resolution of the output textures
    int frameStep;                           // sampling stride over the sensor frame (always >= 1)
    [SerializeField] Color defaultColor;     // fallback color when no RGB frame is available
    Texture2D depthTexture, rgbTexture;
    Color[] depthColors;
    Color[] rgbColors;
    bool initialized = false;

    // Start is called before the first frame update
    void Start()
    {
        if (!initialized) Initialize();
    }

    void Initialize()
    {
        initialized = true;

        // Struct with resolution, FPS and FOV of the depth sensor
        nuitrack.OutputMode modeDepth = NuitrackManager.DepthSensor.GetOutputMode();

        // Guard: an unset/invalid inspector value for hRes would divide by zero below.
        // Fall back to the sensor's native width.
        if (hRes <= 0) hRes = modeDepth.XRes;

        frameStep = modeDepth.XRes / hRes;
        if (frameStep <= 0) frameStep = 1; // frameStep must be > 0
        hRes = modeDepth.XRes / frameStep;

        // Ceiling division: the sampling loops in ProcessFrame run
        // ceil(Res / frameStep) times, so the arrays/textures must be sized the
        // same way or SetPixels/indexing overflows when Res % frameStep != 0.
        InitMeshes(
            (modeDepth.XRes + frameStep - 1) / frameStep, // Width
            (modeDepth.YRes + frameStep - 1) / frameStep, // Height
            modeDepth.HFOV);
    }

    // Allocates the pixel buffers and textures and binds them to the materials.
    void InitMeshes(int cols, int rows, float hfov)
    {
        // Set the size of the arrays
        depthColors = new Color[cols * rows];
        rgbColors = new Color[cols * rows];

        // Create a depth texture (single float channel; depth goes into .r)
        depthTexture = new Texture2D(cols, rows, TextureFormat.RFloat, false)
        {
            filterMode = FilterMode.Point,
            wrapMode = TextureWrapMode.Clamp
        };
        depthTexture.Apply();

        // Create an RGB texture
        rgbTexture = new Texture2D(cols, rows, TextureFormat.ARGB32, false)
        {
            filterMode = FilterMode.Point,
            wrapMode = TextureWrapMode.Clamp
        };
        rgbTexture.Apply();

        // Applying textures to the materials
        depthMat.mainTexture = depthTexture;
        colorMat.mainTexture = rgbTexture;
    }

    // Update is called once per frame
    void Update()
    {
        nuitrack.DepthFrame newDepth = NuitrackManager.DepthFrame;
        nuitrack.ColorFrame newColor = NuitrackManager.ColorFrame;
        if (newDepth == null || newColor == null) return;

        // Process whenever the depth frame reference changed — including the very
        // first frame (the original code left haveNewFrame false while the cached
        // depthFrame was still null, so frame #1 was silently skipped).
        bool haveNewFrame = depthFrame != newDepth;
        depthFrame = newDepth;
        colorFrame = newColor;

        if (haveNewFrame)
            ProcessFrame(depthFrame, colorFrame);
    }

    // Samples the depth and color frames into the pixel buffers and uploads them.
    void ProcessFrame(nuitrack.DepthFrame depthFrame, nuitrack.ColorFrame colorFrame)
    {
        bool haveColor = colorFrame != null;
        // Log once per frame — the original logged inside the per-pixel loop,
        // which floods the console and stalls the editor.
        if (!haveColor)
            Debug.Log("No Color Frame ");

        int pointIndex = 0;
        for (int i = 0; i < depthFrame.Rows; i += frameStep)
        {
            for (int j = 0; j < depthFrame.Cols; j += frameStep)
            {
                // Normalize the raw depth value into [0,1] for the RFloat texture.
                depthColors[pointIndex].r = depthFrame[i, j] / 16384f;

                // Take the frame RGB colors; if the camera colors are not
                // received, the default color is applied.
                Color rgbCol = defaultColor;
                if (haveColor)
                {
                    // The color stream may run at a different resolution than the
                    // depth stream; indexing the color frame with depth coordinates
                    // (as the original did) reads out of range or samples the wrong
                    // pixel. Scale the indices to the color frame's own grid.
                    int ci = i * colorFrame.Rows / depthFrame.Rows;
                    int cj = j * colorFrame.Cols / depthFrame.Cols;
                    var c = colorFrame[ci, cj]; // read the indexer once per pixel
                    rgbCol = new Color32(c.Red, c.Green, c.Blue, 255);
                }
                rgbColors[pointIndex] = rgbCol;
                ++pointIndex;
            }
        }

        depthTexture.SetPixels(depthColors);
        rgbTexture.SetPixels(rgbColors);
        depthTexture.Apply();
        rgbTexture.Apply();
    }
}
Any pointers on why the color stream never comes through would be appreciated — thanks in advance.