Unity2D: Texture property gets UnassignedReferenceException - C#

In my Unity2D project, I made a prefab containing a RawImage GameObject for rendering thumbnail images.
Here is the structure of the prefab:
I'm using two methods to load images from web URLs and render them into the Texture property of the RawImage.
void ReadData()
{
    foreach (GameObject thumbnailGrouping in thumbnailList)
    {
        Destroy(thumbnailGrouping.gameObject);
    }
    thumbnailList.Clear();

    GroupingConfigration.Page.PageResponse people = webConfig.page.pageResponse;
    TextMeshProUGUI title = thumbnailGrouping.GetComponentInChildren<TextMeshProUGUI>();
    title.text = people.collection.name;

    // Store the thumbnails urls
    List<string> thumbnails = new List<string>();
    foreach (var documentNode in people.classifier.classifierNode[0].documentNode)
    {
        string content = documentNode.metadataList.metadata[0].content;
        int startIndex = content.IndexOf("src=\"") + "src=\"".Length;
        int length = content.IndexOf("\"", startIndex) - startIndex;
        string url = content.Substring(startIndex, length);
        thumbnails.Add("https://cs-turns-50.interactwith.us" + url);
    }

    GameObject grouping = Instantiate(thumbnailGrouping);
    grouping.name = title.text;
    grouping.transform.SetParent(GameObject.Find("Canvas/FloatingMedias/ThumbnailView").transform, false);

    RawImage[] rawImages = grouping.transform.GetComponentsInChildren<RawImage>(true);
    // Set the texture of each RawImage
    for (int i = 0; i < rawImages.Length; i++)
    {
        StartCoroutine(LoadImage(thumbnails[i], rawImages[i]));
    }
}

IEnumerator LoadImage(string url, RawImage rawImage)
{
    UnityWebRequest request = UnityWebRequestTexture.GetTexture(url);
    yield return request.SendWebRequest();
    if (request.isNetworkError || request.isHttpError)
    {
        Debug.Log(request.error);
    }
    else
    {
        Texture2D texture = ((DownloadHandlerTexture)request.downloadHandler).texture;
        rawImage.texture = texture;
        Debug.Log("Width: " + rawImage.texture.width + ", Height: " + rawImage.texture.height);
    }
}
However, the last Debug.Log line throws this error: "UnassignedReferenceException: The variable m_Texture of RawImage has not been assigned.
You probably need to assign the m_Texture variable of the RawImage script in the inspector." This happens because the texture is null.
I have checked that it gets the correct image URL (with a "gif" extension). The parent object of rawImages[i] is ThumbnailGrouping, and rawImages[i] itself is the RawImage component; it has all the properties of a normal RawImage component, and everything is correct except the Texture value, which is null.
Why does this "null" problem happen, and how do I fix it?
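A hedged sketch (not part of the original question): one way to narrow this down is to null-check the decoded texture inside LoadImage before assigning it, since DownloadHandlerTexture may not decode a GIF and could leave the texture null even when the request succeeds:
// Hedged sketch: defensive version of the else branch inside LoadImage above.
Texture2D texture = ((DownloadHandlerTexture)request.downloadHandler).texture;
if (texture == null)
{
    // GIF (and other unsupported formats) may not be decoded into a Texture2D.
    Debug.LogWarning("No texture decoded for " + url);
}
else
{
    rawImage.texture = texture;
    Debug.Log("Width: " + texture.width + ", Height: " + texture.height);
}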

Related

Rotate Texture2D from XRCpuImage and transfer it to RawImage - Crashes after approx. 30 seconds

I get the latest image via XRCpuImage, convert it to a Texture2D, and transfer it to a RawImage.
The result looks like this: XRCpuImage on RawImage - no rotation
As you can see, the transferred texture is rotated, so I use a function called "rotateTexture" to rotate the texture into the correct position: Rotated Texture
The rotation looks good so far, but when I run the application it crashes after about 30 seconds.
Could someone tell me what could be wrong with the code below?
using System;
using Unity.Collections.LowLevel.Unsafe;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.ARFoundation;
using UnityEngine.XR.ARSubsystems;
using Unity.Collections;

public class CPU_RawImage : MonoBehaviour
{
    Texture2D m_CameraTexture;
    GameObject DR_layer;

    [SerializeField]
    [Tooltip("The ARCameraManager which will produce frame events.")]
    ARCameraManager m_CameraManager; // Create m_CameraManager

    /// <summary>
    /// Get or set the <c>ARCameraManager</c>.
    /// </summary>
    public ARCameraManager cameraManager
    {
        get => m_CameraManager;
        set => m_CameraManager = value; // Assign the ARCamera
    }

    [SerializeField]
    RawImage m_RawCameraImage;

    /// <summary>
    /// The UI RawImage used to display the image on screen.
    /// </summary>
    public RawImage rawCameraImage
    {
        get => m_RawCameraImage;
        set => m_RawCameraImage = value;
    }

    XRCpuImage.Transformation m_Transformation = XRCpuImage.Transformation.MirrorX;

    void OnEnable()
    {
        if (m_CameraManager != null)
        {
            m_CameraManager.frameReceived += OnCameraFrameReceived;
        }
    }

    void OnDisable()
    {
        if (m_CameraManager != null)
        {
            m_CameraManager.frameReceived -= OnCameraFrameReceived;
        }
    }

    // Rotate function
    private Texture2D rotateTexture(Texture2D originalTexture, bool clockwise)
    {
        Color32[] original = originalTexture.GetPixels32();
        Color32[] rotated = new Color32[original.Length];
        int w = originalTexture.width;
        int h = originalTexture.height;
        int iRotated, iOriginal;
        for (int j = 0; j < h; ++j)
        {
            for (int i = 0; i < w; ++i)
            {
                iRotated = (i + 1) * h - j - 1;
                iOriginal = clockwise ? original.Length - 1 - (j * w + i) : j * w + i;
                rotated[iRotated] = original[iOriginal];
            }
        }
        Texture2D rotatedTexture = new Texture2D(h, w);
        rotatedTexture.SetPixels32(rotated);
        rotatedTexture.Apply();
        return rotatedTexture;
    }

    unsafe void UpdateCameraImage(int[] pixel)
    {
        // Attempt to get the latest camera image. If this method succeeds,
        // it acquires a native resource that must be disposed (see below).
        if (!cameraManager.TryAcquireLatestCpuImage(out XRCpuImage image))
        {
            return;
        }

        // Once we have a valid XRCpuImage, we can access the individual image "planes"
        // (the separate channels in the image). XRCpuImage.GetPlane provides
        // low-overhead access to this data. This could then be passed to a
        // computer vision algorithm. Here, we will convert the camera image
        // to an RGBA texture and draw it on the screen.

        // Choose an RGBA format.
        // See XRCpuImage.FormatSupported for a complete list of supported formats.
        var format = TextureFormat.RGBA32;
        if (m_CameraTexture == null || m_CameraTexture.width != image.width || m_CameraTexture.height != image.height)
        {
            m_CameraTexture = new Texture2D(image.width, image.height, format, false);
        }

        // Convert the image to format, flipping the image across the Y axis.
        // We can also get a sub rectangle, but we'll get the full image here.
        var conversionParams = new XRCpuImage.ConversionParams(image, format, m_Transformation);

        // Texture2D allows us write directly to the raw texture data.
        // This allows us to do the conversion in-place without making any copies.
        var rawTextureData = m_CameraTexture.GetRawTextureData<byte>();
        try
        {
            image.Convert(conversionParams, new IntPtr(rawTextureData.GetUnsafePtr()), rawTextureData.Length);
        }
        finally
        {
            // We must dispose of the XRCpuImage after we're finished
            // with it to avoid leaking native resources.
            image.Dispose();
        }

        // Apply the updated texture data to our texture
        m_CameraTexture.Apply();

        // Rotate texture clockwise
        //m_CameraTexture = rotateTexture(m_CameraTexture, true);

        // Set the RawImage's texture so we can visualize it.
        m_RawCameraImage.texture = m_CameraTexture;
    }

    void Start()
    {
        DR_layer = GameObject.Find("RawImage");
    }

    void OnCameraFrameReceived(ARCameraFrameEventArgs eventArgs)
    {
        int[] pixel = { 200, 1000 };
        UpdateCameraImage(pixel);
    }
}
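A hedged observation (not a confirmed answer): rotateTexture allocates a brand-new Texture2D on every camera frame and the previous ones are never destroyed, so memory use grows each frame, which could explain a crash after roughly 30 seconds. A minimal sketch of releasing the old texture first, assuming it sits in the same class (the ApplyRotated name and the m_RotatedTexture field are illustrative additions):
Texture2D m_RotatedTexture; // additional field, not in the original script

void ApplyRotated(Texture2D source)
{
    if (m_RotatedTexture != null)
    {
        Destroy(m_RotatedTexture); // release the rotated texture from the previous frame
    }
    m_RotatedTexture = rotateTexture(source, true);
    m_RawCameraImage.texture = m_RotatedTexture;
}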

Hiding Clone Game Object with Tag

I got an error while trying to hide and show a group of cloned objects by tag. The clones are dynamic (their positions change). I have a toggle to show and hide some information text. Now I want to hide and unhide the game objects with the tag "windtag". I am also trying to hide those game objects in the arrow method.
public GameObject[] test;

public void invisible(bool log)
{
    test = GameObject.FindGameObjectsWithTag("windtag");
    test.GetComponent<Renderer>().enabled = log;
}

public void clone()
{
    Canvas newcanvas = Instantiate(canvas);
    //Use .SetParent(canvasName, false)
    Text cloneposition = Instantiate(shiposition, newpos);
    Text clonewind = Instantiate(windspeedtext, newpos);
    Text clonedlow = Instantiate(flowtext, newpos);
    Text clonetemperature = Instantiate(temperaturetext, newpos);
    newcanvas.transform.position = shipvalue.transform.position;
    cloneposition.transform.SetParent(newcanvas.transform, false);
    clonewind.transform.SetParent(newcanvas.transform, false);
    clonedlow.transform.SetParent(newcanvas.transform, false);
    clonetemperature.transform.SetParent(newcanvas.transform, false);
    clonewind.gameObject.SetActive(false);
    clonetemperature.gameObject.SetActive(false);
}

public void arrow(float[,] arrowdata)
{
    for (int x = 0; x < arrowdata.GetLength(0); x++)
    {
        for (int y = 0; y < arrowdata.GetLength(1); y++)
        {
            if (grid[x, y] == 1)
            {
                if (arrowdata[x, y] == 5)
                {
                    GameObject referenceArrow = Instantiate(Resources.Load("down")) as GameObject;
                    float posY = shipvalue.transform.position.y - 9f;
                    referenceArrow.transform.position = new Vector3(shipvalue.transform.position.x - 0.5f, posY);
                }
                if (arrowdata[x, y] == 4)
                {
                    GameObject referenceArrow = Instantiate(Resources.Load("top left")) as GameObject;
                    float posY = shipvalue.transform.position.y - 9f;
                    referenceArrow.transform.position = new Vector3(shipvalue.transform.position.x - 0.5f, posY);
                }
                if (arrowdata[x, y] == 3)
                {
                    GameObject referenceArrow = Instantiate(Resources.Load("top right")) as GameObject;
                    float posY = shipvalue.transform.position.y - 9f;
                    referenceArrow.transform.position = new Vector3(shipvalue.transform.position.x - 0.5f, posY);
                }
            }
        }
    }
}
This is the error:
Severity Code Description Project File Line Suppression State
Error CS1061 'GameObject[]' does not contain a definition for 'GetComponent' and no accessible extension method 'GetComponent' accepting a first argument of type 'GameObject[]' could be found (are you missing a using directive or an assembly reference?) Assembly-CSharp C:\Users\Skylarking\Unity\My First Game\MyFirstGame\Assets\Scripts\test.cs 391 Active
I think your current problem is that there is more than one object with the tag "windtag". So if you search for GameObjects with that tag, more than one is returned, resulting in a GameObject array instead of a single GameObject.
An easy fix could be to add a different tag to each GameObject and then pass that string to the function.
invisible(false, "tagexample1");

public void invisible(bool log, string tag)
{
    test = GameObject.FindGameObjectWithTag(tag);
    test.GetComponent<Renderer>().enabled = log;
}
Or you could put your GetComponent<Renderer>() call into a loop to hide/show all elements with the windtag, if that is what you want.
public void invisible(bool log)
{
    test = GameObject.FindGameObjectsWithTag("windtag");
    foreach (GameObject gm in test)
    {
        gm.GetComponent<Renderer>().enabled = log;
    }
}
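One caveat (hedged): the clones created in clone() are UI Text elements, which use a CanvasRenderer rather than a Renderer, so GetComponent<Renderer>() can return null on them. A sketch that toggles UI graphics and regular renderers separately, under that assumption:
public void invisible(bool log)
{
    foreach (GameObject gm in GameObject.FindGameObjectsWithTag("windtag"))
    {
        // UI Text has no Renderer; toggle its Graphic component instead.
        var graphic = gm.GetComponent<UnityEngine.UI.Graphic>();
        if (graphic != null) graphic.enabled = log;

        // Non-UI objects (e.g. the arrow clones) keep using Renderer.
        var rend = gm.GetComponent<Renderer>();
        if (rend != null) rend.enabled = log;
    }
}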

Capturing a snapshot of VideoBackground within Unity Vuforia plugin

I would like to capture the real-world view at the moment when content is placed using a GroundPlane or Mid Air stage.
This seems to be readily available within the AR camera's BackgroundPlane Mesh Renderer - Custom/VideoBackground (see screenshot below). However, when I try to access this texture and encode it to JPG, the output image is black.
Here is the code I am testing with:
MeshRenderer backgroundMesh = GameObject.Find("BackgroundPlane").GetComponent<MeshRenderer>();
Texture2D texture = (Texture2D)backgroundMesh.material.mainTexture;
byte[] bytes = texture.EncodeToJPG();
var dirPath = Application.dataPath + "/../SavedImages/";
if (!Directory.Exists(dirPath))
{
    Directory.CreateDirectory(dirPath);
}
File.WriteAllBytes(dirPath + "Image" + ".jpg", bytes);
Here is a screenshot of the Vuforia settings for Video Background:
You can use the Vuforia Image class to capture the real-world view only.
The scripts are tested on mobile, and are used in FMETP STREAM.
For your case, you can convert the Texture2D to a JPG (see the sketch after the script below).
using UnityEngine;
using System.Collections;
using Vuforia;
using UnityEngine.UI;

public class VuforiaCamAccess : MonoBehaviour
{
    private bool mAccessCameraImage = true;
    public RawImage rawImage;
    public GameObject Mesh;
    private Texture2D texture;

#if UNITY_EDITOR
    private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.GRAYSCALE;
#else
    private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.RGB888;
#endif

    private bool mFormatRegistered = false;

    void Start()
    {
#if UNITY_EDITOR
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.R8, false);
#else
        texture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
#endif
        // Register Vuforia life-cycle callbacks:
        Vuforia.VuforiaARController.Instance.RegisterVuforiaStartedCallback(OnVuforiaStarted);
        Vuforia.VuforiaARController.Instance.RegisterOnPauseCallback(OnPause);
        Vuforia.VuforiaARController.Instance.RegisterTrackablesUpdatedCallback(OnTrackablesUpdated);
    }

    private void OnVuforiaStarted()
    {
        // Try register camera image format
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register pixel format " + mPixelFormat.ToString() +
                "\n the format may be unsupported by your device;" +
                "\n consider using a different pixel format.");
            mFormatRegistered = false;
        }
    }

    private void OnPause(bool paused)
    {
        if (paused)
        {
            Debug.Log("App was paused");
            UnregisterFormat();
        }
        else
        {
            Debug.Log("App was resumed");
            RegisterFormat();
        }
    }

    private void OnTrackablesUpdated()
    {
        //skip if still loading image to texture2d
        if (LoadingTexture) return;
        if (mFormatRegistered)
        {
            if (mAccessCameraImage)
            {
                Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
                //if (image != null && image.IsValid())
                if (image != null)
                {
                    byte[] pixels = image.Pixels;
                    int width = image.Width;
                    int height = image.Height;
                    StartCoroutine(SetTexture(pixels, width, height));
                }
            }
        }
    }

    bool LoadingTexture = false;

    IEnumerator SetTexture(byte[] pixels, int width, int height)
    {
        if (!LoadingTexture)
        {
            LoadingTexture = true;
            if (pixels != null && pixels.Length > 0)
            {
                if (texture.width != width || texture.height != height)
                {
#if UNITY_EDITOR
                    texture = new Texture2D(width, height, TextureFormat.R8, false);
#else
                    texture = new Texture2D(width, height, TextureFormat.RGB24, false);
#endif
                }
                texture.LoadRawTextureData(pixels);
                texture.Apply();

                if (rawImage != null)
                {
                    rawImage.texture = texture;
                    rawImage.material.mainTexture = texture;
                }
                if (Mesh != null) Mesh.GetComponent<Renderer>().material.mainTexture = texture;
            }
            yield return null;
            LoadingTexture = false;
        }
    }

    private void UnregisterFormat()
    {
        Debug.Log("Unregistering camera pixel format " + mPixelFormat.ToString());
        CameraDevice.Instance.SetFrameFormat(mPixelFormat, false);
        mFormatRegistered = false;
    }

    private void RegisterFormat()
    {
        if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
        {
            Debug.Log("Successfully registered camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = true;
        }
        else
        {
            Debug.LogError("Failed to register camera pixel format " + mPixelFormat.ToString());
            mFormatRegistered = false;
        }
    }
}
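To produce the JPG mentioned above, the captured texture can be encoded and written out once SetTexture has filled it; this is a hedged sketch (the SaveSnapshot name and output path are illustrative, not part of the original answer):
// Hedged sketch: requires "using System.IO;". Call after 'texture' has been filled.
void SaveSnapshot(Texture2D source)
{
    byte[] jpgBytes = source.EncodeToJPG();
    string dirPath = Path.Combine(Application.persistentDataPath, "SavedImages");
    if (!Directory.Exists(dirPath))
    {
        Directory.CreateDirectory(dirPath);
    }
    File.WriteAllBytes(Path.Combine(dirPath, "snapshot.jpg"), jpgBytes);
}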
I was able to resolve this issue by working with the Vuforia ARCamera game object directly, rather than the BackgroundPlane Mesh Renderer. The ARCamera does not have a targetTexture set, as it outputs directly to the screen. However, I can assign a temporary targetTexture, output a frame to it, and then remove the targetTexture immediately after processing, so that AR mode can continue.
There is also a further solution: use the TextureBufferCamera, which Vuforia creates at runtime. It already outputs to a targetTexture, but at a fixed resolution, so the ARCamera is better for my specific requirement.
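A hedged sketch of that temporary targetTexture approach (the method name and resolution handling are illustrative, not the exact code used):
// Render one frame of the ARCamera into a temporary RenderTexture, copy it
// into a Texture2D, then restore the camera so AR rendering continues.
Texture2D CaptureARCamera(Camera arCamera)
{
    RenderTexture rt = RenderTexture.GetTemporary(Screen.width, Screen.height, 24);
    RenderTexture previousTarget = arCamera.targetTexture;
    RenderTexture previousActive = RenderTexture.active;

    arCamera.targetTexture = rt;
    arCamera.Render(); // render a single frame into the RenderTexture

    RenderTexture.active = rt;
    Texture2D snapshot = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
    snapshot.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0);
    snapshot.Apply();

    arCamera.targetTexture = previousTarget; // remove the temporary target immediately
    RenderTexture.active = previousActive;
    RenderTexture.ReleaseTemporary(rt);
    return snapshot;
}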

Load sprites from persistent data path

I am downloading some sprites from my server and storing them in Application.persistentDataPath.
However, I cannot load the controller using Resources.Load (controllerPath) because the path is outside the Resources folder.
Additionally, I get a MissingComponentException when I try to add the animation controller to the GameObject.
Here is my code:
private GameObject SideSprite;
// ...
string controllerPath = Application.persistentDataPath+"/"+aux+"/"+aux+"Controller";
controller = (RuntimeAnimatorController)Resources.Load (controllerPath); // Returns null
// Below I get:
// MissingComponentException: There is no 'Animator' attached to the
// "Missing Prefab (Dummy)" game object, but a script is trying to access it.
SideSprite.GetComponent<Animator> ().runtimeAnimatorController = controller;
How should I load the resources from the persistent data path?
persistentDataPath is used like any regular folder. I would not store a Sprite but rather a texture, and the next time you need it you repeat the process of applying the texture to a sprite:
public static void StoreCacheSprite(string url, Sprite sprite)
{
    if (sprite == null || string.IsNullOrEmpty(url)) { return; }

    // A Sprite exposes its source texture directly; it is not a GameObject,
    // so there is no GetComponent involved here.
    Texture2D tex = sprite.texture;
    byte[] bytes = tex.EncodeToPNG();
    string path = Path.Combine(Application.persistentDataPath, url);
    File.WriteAllBytes(path, bytes);
}

public static Sprite GetCacheSprite(string url)
{
    if (string.IsNullOrEmpty(url)) { return null; }

    string path = Path.Combine(Application.persistentDataPath, url);
    if (File.Exists(path))
    {
        byte[] bytes = File.ReadAllBytes(path);
        Texture2D texture = new Texture2D(4, 4, TextureFormat.RGBA32, false);
        texture.LoadImage(bytes);
        Sprite sp = Sprite.Create(texture, new Rect(0, 0, texture.width, texture.height), new Vector2(0.5f, 0.5f));
        return sp;
    }
    return null;
}
The first method stores the texture using the File class from .NET. It converts the texture to a byte array and writes it to the device's storage (File.WriteAllBytes). You pass it the Sprite and a name for it; that name needs to comply with file and folder path naming rules.
The second method does the inverse process, checking whether the file is already stored and turning the byte array read from disk into a usable Sprite.
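For example, a hypothetical round trip through the two helpers above (the file name and sprite are placeholders):
void CacheAndReload(Sprite downloadedSprite)
{
    StoreCacheSprite("thumb_01.png", downloadedSprite);

    Sprite cached = GetCacheSprite("thumb_01.png");
    if (cached != null)
    {
        GetComponent<SpriteRenderer>().sprite = cached;
    }
}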
You could also simply use WWW to get the data.
string controllerPath = Application.persistentDataPath + "/" + aux + "/" + aux + "Controller";

IEnumerator Start()
{
    WWW www = new WWW("file:///" + controllerPath);
    yield return www;
    Debug.Log(www.texture); // or www.bytes
}
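WWW has since been deprecated; a hedged equivalent using UnityWebRequestTexture would look roughly like this (the LoadFromDisk name is illustrative):
IEnumerator LoadFromDisk(string filePath)
{
    // Requires "using UnityEngine.Networking;".
    using (UnityWebRequest request = UnityWebRequestTexture.GetTexture("file://" + filePath))
    {
        yield return request.SendWebRequest();
        if (request.isNetworkError || request.isHttpError)
        {
            Debug.Log(request.error);
        }
        else
        {
            Texture2D texture = DownloadHandlerTexture.GetContent(request);
            Debug.Log(texture.width + " x " + texture.height);
        }
    }
}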

Photon RPC is not being called correctly (Photon RPC with character generation)

I'm trying to generate a character with a script.
I have 'playerObj' and 'model' prefabs under the Resources folder. playerObj has all the scripts related to controlling the character, plus a PhotonView, Camera, etc., and model has the animation, skinned renderer, and a PhotonView.
What I am trying to do is instantiate playerObj and the model object together. To the model object I want to add some skinned meshes so it has the character's model (I did this so the player is able to change the model's outfit), and then parent the model to playerObj.
The problem is that my own character is instantiated correctly, but other players' characters are not.
Both playerObj and the model object are instantiated, but the skinned mesh is not rendered and the two are not parented...
I don't know what I did wrong or what I'm missing...
As you can see below, I'm using three different scripts to generate the character.
Thank you.
PlayerInit.cs
public IEnumerator CreateCharacter()
{
    var co = CurrentCharacter.GetByGUI(dbid);
    co.InitModel(character, dbid);
    m_loaded = true;
    m_loading = false;
}
CharacterGenerator.cs
public GameObject Generate(PhotonView pv)
{
    int id1 = PhotonNetwork.AllocateViewID();
    GameObject root = PhotonNetwork.Instantiate("Model", new Vector3(0, 0, 0), new Quaternion(0, 0, 0, 0), 0);
    root.name = "body";
    Generate(pv, root);
    return root;
}

public GameObject Generate(PhotonView pv, GameObject root)
{
    int id1 = PhotonNetwork.AllocateViewID();
    putSkin(root);
    pv.RPC("putSkin", PhotonTargets.Others, root, id1, PhotonNetwork.player);
    return root;
}

// Creates a character based on the currentConfiguration, recycling a
// character base; this way the position and animation of the character
// are not changed.
[RPC]
public void putSkin(GameObject root)
{
    float startTime = Time.realtimeSinceStartup;

    // The SkinnedMeshRenderers that will make up a character will be
    // combined into one SkinnedMeshRenderer using multiple materials.
    // This will speed up rendering the resulting character.
    List<CombineInstance> combineInstances = new List<CombineInstance>();
    List<Material> materials = new List<Material>();
    List<Transform> bones = new List<Transform>();
    Transform[] transforms = root.GetComponentsInChildren<Transform>();

    foreach (CharacterElement element in currentConfiguration.Values)
    {
        SkinnedMeshRenderer smr = element.GetSkinnedMeshRenderer();
        materials.AddRange(smr.materials);
        for (int sub = 0; sub < smr.sharedMesh.subMeshCount; sub++)
        {
            CombineInstance ci = new CombineInstance();
            ci.mesh = smr.sharedMesh;
            ci.subMeshIndex = sub;
            combineInstances.Add(ci);
        }

        // As the SkinnedMeshRenderers are stored in assetbundles that do not
        // contain their bones (those are stored in the characterbase assetbundles)
        // we need to collect references to the bones we are using
        foreach (string bone in element.GetBoneNames())
        {
            foreach (Transform transform in transforms)
            {
                if (transform.name != bone) continue;
                bones.Add(transform);
                break;
            }
        }
        Object.Destroy(smr.gameObject);
    }

    // Obtain and configure the SkinnedMeshRenderer attached to
    // the character base.
    SkinnedMeshRenderer r = root.GetComponent<SkinnedMeshRenderer>();
    r.sharedMesh = new Mesh();
    r.sharedMesh.CombineMeshes(combineInstances.ToArray(), false, false);
    r.bones = bones.ToArray();
    r.materials = materials.ToArray();
    r.useLightProbes = true;

    Debug.Log("Generating character took: "
        + (Time.realtimeSinceStartup - startTime) * 1000 + " ms");
}
CurrentCharacter.cs
[PunRPC]
public void InitModel(GameObject prefab, GameObject model, int id)
{
    character = prefab;
    InitModel(model, id);
}

public void InitModel(GameObject model, int id)
{
    parenting(model, id);
    _photonView.RPC("parenting", PhotonTargets.Others, model, id);
}

[RPC]
void parenting(GameObject model, int id)
{
    model.layer = LayerMask.NameToLayer("Girl");
    model.transform.parent = character.transform;
    model.transform.localPosition = Vector3.zero;
    InitMouseLook(model);
    InitAnimations(model);
    if (id == PersistentData.Instance.LoggedInUser.m_id)
    {
        CurrentCharacter.persistentMainModel = model;
    }
}
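One thing worth checking (a hedged observation, not a confirmed fix): Photon cannot serialize a plain GameObject as an RPC parameter, so RPCs such as putSkin and parenting may never arrive on the other clients with a usable reference. The usual pattern is to send the PhotonView's viewID (an int) and look the object up on the receiving side, roughly like this:
// Hedged sketch: pass viewIDs instead of GameObject references through the RPC.
_photonView.RPC("parenting", PhotonTargets.Others, model.GetComponent<PhotonView>().viewID, id);

[PunRPC]
void parenting(int modelViewID, int id)
{
    PhotonView modelView = PhotonView.Find(modelViewID);
    if (modelView == null) return;
    GameObject model = modelView.gameObject;
    // ...same layer / parenting / init logic as in the original parenting() above...
}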
