How to take a screenshot of a particular area in Unity - C#

I'm using Unity 2018. In my project I have to capture a particular area of the screen. I'm using the code below. It works, but the captured image is not exact; it is offset to some extent. How can I capture the exact area?
using UnityEngine;
using System.Collections;
using System;

public class ScreenCapture : MonoBehaviour
{
    public RenderTexture overviewTexture;
    GameObject OVcamera;
    public string path = "";

    void Start()
    {
        OVcamera = GameObject.FindGameObjectWithTag("OverviewCamera");
    }

    void LateUpdate()
    {
        if (Input.GetKeyDown("f9"))
        {
            StartCoroutine(TakeScreenShot());
        }
    }

    // Build a file name from the capture size and a timestamp.
    string fileName(int width, int height)
    {
        return string.Format("screen_{0}x{1}_{2}.png",
            width, height,
            System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
    }

    public IEnumerator TakeScreenShot()
    {
        yield return new WaitForEndOfFrame();
        // The Component.camera shorthand was removed in Unity 5; use GetComponent instead.
        Camera camOV = OVcamera.GetComponent<Camera>();
        RenderTexture currentRT = RenderTexture.active;
        RenderTexture.active = camOV.targetTexture;
        camOV.Render();
        Texture2D imageOverview = new Texture2D(camOV.targetTexture.width, camOV.targetTexture.height,
            TextureFormat.RGB24, false);
        imageOverview.ReadPixels(new Rect(0, 0, camOV.targetTexture.width, camOV.targetTexture.height), 0, 0);
        imageOverview.Apply();
        RenderTexture.active = currentRT;
        byte[] bytes = imageOverview.EncodeToPNG();
        // Save to disk (the Snapshots directory must already exist).
        string filename = fileName(imageOverview.width, imageOverview.height);
        path = Application.persistentDataPath + "/Snapshots/" + filename;
        System.IO.File.WriteAllBytes(path, bytes);
    }
}
That is my code above.

use this:
using System;
using System.Collections;
using UnityEngine;

public class RegionCapture : MonoBehaviour
{
    Texture2D screencap;
    Texture2D border;
    bool shot = false;
    public string path;

    void Start()
    {
        screencap = new Texture2D(300, 200, TextureFormat.RGB24, false);
        border = new Texture2D(2, 2, TextureFormat.ARGB32, false);
        // Fill the border texture so the frame is visible on screen.
        border.SetPixels(new Color[] { Color.red, Color.red, Color.red, Color.red });
        border.Apply();
    }

    // Update is called once per frame
    void Update()
    {
        if (Input.GetKeyUp(KeyCode.Mouse0))
        {
            StartCoroutine("Capture");
        }
    }

    string fileName(int width, int height)
    {
        return string.Format("screen_{0}x{1}_{2}.png",
            width, height,
            System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
    }

    void OnGUI()
    {
        // Draw a rectangular frame around the capture area.
        GUI.DrawTexture(new Rect(200, 100, 300, 2), border, ScaleMode.StretchToFill);
        GUI.DrawTexture(new Rect(200, 300, 300, 2), border, ScaleMode.StretchToFill);
        GUI.DrawTexture(new Rect(195, 100, 2, 200), border, ScaleMode.StretchToFill);
        GUI.DrawTexture(new Rect(500, 100, 2, 201), border, ScaleMode.StretchToFill);
        if (shot)
        {
            // Preview of the last capture.
            GUI.DrawTexture(new Rect(50, 10, 60, 40), screencap, ScaleMode.StretchToFill);
        }
    }

    IEnumerator Capture()
    {
        yield return new WaitForEndOfFrame();
        screencap.ReadPixels(new Rect(198, 98, 298, 198), 0, 0);
        screencap.Apply();
        shot = true;
        // Encode the captured region (not the border texture) and save it.
        byte[] bytes = screencap.EncodeToPNG();
        string filename = fileName(screencap.width, screencap.height);
        System.IO.File.WriteAllBytes("D:/" + filename, bytes);
    }
}

This script takes a customized screenshot of any object that has a RectTransform component attached to it.
using System.Collections;
using UnityEngine;
using UnityEngine.UI;

public class TakeScreenshotAndSave : MonoBehaviour
{
    // Object to screenshot
    [SerializeField] private RectTransform _objToScreenshot;
    // Assign the button that takes the screenshot when clicked
    [SerializeField] private Button _takeScreenshotButton;

    void Start()
    {
        _takeScreenshotButton.onClick.AddListener(OnClickTakeScreenshotAndSaveButton);
    }

    private void OnClickTakeScreenshotAndSaveButton()
    {
        StartCoroutine(TakeSnapShotAndSave());
    }

    // Using a coroutine instead of a normal method
    public IEnumerator TakeSnapShotAndSave()
    {
        // ReadPixels will throw an error at runtime if this is removed
        yield return new WaitForEndOfFrame();
        // Get the corners of the RectTransform rect and store them in a vector array
        Vector3[] corners = new Vector3[4];
        _objToScreenshot.GetWorldCorners(corners);
        // The -100 keeps the read rect inside the screen here; removing it can make ReadPixels throw (see the note below)
        int width = ((int)corners[3].x - (int)corners[0].x) - 100;
        int height = (int)corners[1].y - (int)corners[0].y;
        var startX = corners[0].x;
        var startY = corners[0].y;
        // Make a temporary texture and read pixels into it
        Texture2D ss = new Texture2D(width, height, TextureFormat.RGB24, false);
        ss.ReadPixels(new Rect(startX, startY, width, height), 0, 0);
        ss.Apply();
        Debug.Log("Start X : " + startX + " Start Y : " + startY);
        Debug.Log("Screen Width : " + Screen.width + " Screen Height : " + Screen.height);
        Debug.Log("Texture Width : " + width + " Texture Height : " + height);
        // Save the screenshot to disk
        byte[] byteArray = ss.EncodeToPNG();
        string savePath = Application.persistentDataPath + "/ScreenshotSave.png";
        System.IO.File.WriteAllBytes(savePath, byteArray);
        Debug.Log("Screenshot Path : " + savePath);
        // Destroy the texture to avoid memory leaks
        Destroy(ss);
    }
}
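About the hard-coded -100: ReadPixels fails when the requested rect extends past the edges of the screen, and GetWorldCorners can return coordinates at or beyond the screen bounds for a full-width element, so the subtraction merely hides the problem. A more robust sketch, assuming a Screen Space - Overlay canvas so the corners are already in screen pixels, clamps the rect instead of subtracting a magic number:

// Clamp the capture rect to the screen instead of subtracting a magic 100.
int startX = Mathf.Clamp((int)corners[0].x, 0, Screen.width);
int startY = Mathf.Clamp((int)corners[0].y, 0, Screen.height);
int width = Mathf.Min((int)corners[3].x, Screen.width) - startX;
int height = Mathf.Min((int)corners[1].y, Screen.height) - startY;
Texture2D ss = new Texture2D(width, height, TextureFormat.RGB24, false);
ss.ReadPixels(new Rect(startX, startY, width, height), 0, 0);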

Related

Capture everything inside a rect in unity

I have a rect which contains a portion of my UI. I want to take a 'screenshot' of everything inside that rect and save it to a .jpeg file.
I have absolutely no idea how to do this, or whether it's even possible. Unfortunately, I couldn't find anything on the internet.
What you need to do is use a RenderTexture:
1. Create a RenderTexture asset in the editor.
2. Create a Camera that sees exactly what you want in your screenshot.
3. Cameras in Unity have an option called Target Texture. Put your RenderTexture in this field; the camera will then render into this texture instead of to the screen. See the Manual for detailed examples.
4. Make a script that has access to your RenderTexture. This script creates a Texture2D from the render texture using Texture2D.ReadPixels.
5. Use Texture2D.EncodeToJPG to save your Texture2D to a file. (A minimal sketch of these steps follows below.)
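Here is a rough sketch of those steps, assuming a camera that already has the RenderTexture assigned as its Target Texture (the class and method names are illustrative):

using System.IO;
using UnityEngine;

public class RectCapture : MonoBehaviour
{
    public Camera captureCamera; // camera whose Target Texture is the RenderTexture

    public void CaptureToJpg(string savePath)
    {
        RenderTexture rt = captureCamera.targetTexture;
        RenderTexture previous = RenderTexture.active;
        RenderTexture.active = rt;
        captureCamera.Render();
        // Read the render texture into a Texture2D.
        Texture2D tex = new Texture2D(rt.width, rt.height, TextureFormat.RGB24, false);
        tex.ReadPixels(new Rect(0, 0, rt.width, rt.height), 0, 0);
        tex.Apply();
        RenderTexture.active = previous;
        // Encode to JPG and write to disk.
        File.WriteAllBytes(savePath, tex.EncodeToJPG());
        Destroy(tex);
    }
}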
You need this: the same TakeScreenshotAndSave script shown in the previous question, which captures the region of any RectTransform and saves it as a PNG.

How to save a PNG using Unity WebCamTexture

I want to save a picture using the device's native camera. Currently I cannot get the image to save to file. I have a RawImage whose texture is the native device camera image. I am taking the bytes from that RawImage, encoding them to PNG, and then writing the PNG to a file on my computer.
public WebCamTexture webCamTexture;
public RawImage myImage;

// NOTE: lowercase "start" is never called by Unity; it should be "Start".
public void start()
{
    webCamTexture = new WebCamTexture();
    myImage.texture = webCamTexture;
    myImage.transform.localScale = new Vector3(1, -1, 1);
    webCamTexture.Play();
    // NOTE: Rect is not a Component, so GetComponent<Rect>() cannot work;
    // the RawImage's size comes from GetComponent<RectTransform>().rect.
    int width = (int)GameObject.Find("myImage").GetComponent<Rect>().width;
    int height = (int)GameObject.Find("myImage").GetComponent<Rect>().height;
    Texture2D tex = new Texture2D(width, height, TextureFormat.RGB24, false);
    // NOTE: this reads from the screen, not from the webcam texture, and it
    // runs before the webcam has delivered its first frame.
    tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    tex.Apply();
    byte[] bytes = tex.EncodeToPNG();
    System.IO.File.WriteAllBytes(Application.dataPath + "/" + "imgcap.png", bytes);
    Object.Destroy(tex);
}
using System;
using System.IO;
using System.Runtime.InteropServices;
using UnityEngine;

public class WebCamScreenShot : MonoBehaviour
{
    public static int fileCounter = 0;
    WebCamTexture webCamTexture;
    public string path = "C:/temp/UnityScreenShots";
    public string fileNamePrefix = "image_";

    void Start()
    {
        var devices = WebCamTexture.devices;
        if (devices.Length < 1) throw new System.Exception("No webcams were found");
        var device = devices[0];
        webCamTexture = new WebCamTexture(device.name);
        webCamTexture.requestedFPS = 30;
        webCamTexture.requestedWidth = 320;
        webCamTexture.requestedHeight = 240;
        webCamTexture.Play();
        if (webCamTexture.width < 1 || webCamTexture.height < 1) throw new System.Exception("Invalid resolution");
    }

    public void SaveToPNG()
    {
        // Make sure the target directory exists before writing.
        Directory.CreateDirectory(path);
        // Zero-pad the counter to five digits, e.g. image_00042.png.
        // (The original nested ternary padded in the wrong order.)
        string image_path = path + $"/{fileNamePrefix}{fileCounter:D5}.png";
        byte[] data = ScreenshotWebcam(webCamTexture);
        File.WriteAllBytes(image_path, data);
        fileCounter++;
    }

    static byte[] ScreenshotWebcam(WebCamTexture wct)
    {
        // Copy the current webcam frame into a readable Texture2D, then encode it.
        Texture2D colorTex = new Texture2D(wct.width, wct.height, TextureFormat.RGBA32, false);
        byte[] colorByteData = Color32ArrayToByteArray(wct.GetPixels32());
        colorTex.LoadRawTextureData(colorByteData);
        colorTex.Apply();
        return colorTex.EncodeToPNG();
    }

    static byte[] Color32ArrayToByteArray(Color32[] colors)
    {
        // https://stackoverflow.com/a/21575147/2496170
        if (colors == null || colors.Length == 0) return null;
        int lengthOfColor32 = Marshal.SizeOf(typeof(Color32));
        int length = lengthOfColor32 * colors.Length;
        byte[] bytes = new byte[length];
        GCHandle handle = default(GCHandle);
        try
        {
            handle = GCHandle.Alloc(colors, GCHandleType.Pinned);
            IntPtr ptr = handle.AddrOfPinnedObject();
            Marshal.Copy(ptr, bytes, 0, length);
        }
        finally
        {
            if (handle != default(GCHandle)) handle.Free();
        }
        return bytes;
    }
}
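As a side note, the pinned-memory round trip above can be avoided: Texture2D.SetPixels32 accepts the Color32 array from WebCamTexture.GetPixels32 directly. A minimal equivalent sketch:

static byte[] ScreenshotWebcamSimple(WebCamTexture wct)
{
    // SetPixels32 accepts the webcam's Color32 array directly,
    // avoiding the manual Marshal.Copy round trip.
    Texture2D colorTex = new Texture2D(wct.width, wct.height, TextureFormat.RGBA32, false);
    colorTex.SetPixels32(wct.GetPixels32());
    colorTex.Apply();
    return colorTex.EncodeToPNG();
}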

How to prevent override of member variables set inside of a static method in c#

I am having an issue where a variable in my class, which is set in a static method of the class, is constantly overridden by other calls of the method.
For better understanding: this script is used to create a texture on the fly which is only 1 pixel in width and height. In usage this texture can then be "stretched" over an area. However, the texture cannot be saved, which is fine.
public class DrawPixelTexture
{
    private static Texture2D t1 = new Texture2D(1, 1, TextureFormat.RGBA32, true);

    static DrawPixelTexture()
    {
        t1.hideFlags = HideFlags.HideAndDontSave;
    }

    public static void Texture(Rect rect, Color colour, float opacity = 1)
    {
        colour.a = opacity;
        // ensure that we only call Apply() once by reading the colour of the pixel
        // at 0,0 and seeing if it is the same as 'colour'
        if (t1.GetPixel(0, 0) != colour)
        {
            Debug.Log("still being overriden");
            t1.SetPixel(0, 0, colour);
            t1.Apply();
        }
        Graphics.DrawTexture(rect, t1);
    }
}
Here is a usage example:
public class TestDrawTexture : MonoBehaviour // OnGUI requires a MonoBehaviour
{
    void OnGUI()
    {
        DrawPixelTexture.Texture(new Rect(0, 0, 100, 20), Color.gray);
        DrawPixelTexture.Texture(new Rect(0, 0, 100, 20), Color.blue);
    }
}
"still being overriden" will be printed out constantly because the gray colour which was set is then overriden by the blue colour and vice-versa
how do I go about fixing this ?
This is doing exactly what static is supposed to do, which is to have only one instance. Remove static from your functions and variables, then create two instances of DrawPixelTexture: one for gray and one for blue.
Note that you should not be using the OnGUI function. To display a UI, use the Image or RawImage components and assign the texture to them. If this is just a 2D sprite, use a SpriteRenderer. Don't use OnGUI.
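As a rough sketch of that suggestion (the RawImage field here is hypothetical; assign it in the inspector), the same 1x1 colour fill without OnGUI could look like this:

using UnityEngine;
using UnityEngine.UI;

public class ColourBlock : MonoBehaviour
{
    [SerializeField] private RawImage target; // assigned in the inspector

    void Start()
    {
        // A 1x1 texture stretched over the RawImage's RectTransform.
        Texture2D tex = new Texture2D(1, 1, TextureFormat.RGBA32, false);
        tex.SetPixel(0, 0, Color.gray);
        tex.Apply();
        target.texture = tex;
    }
}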
DrawPixelTexture script:
public class DrawPixelTexture
{
    private Texture2D t1 = new Texture2D(1, 1, TextureFormat.RGBA32, true);

    public DrawPixelTexture()
    {
        t1.hideFlags = HideFlags.HideAndDontSave;
    }

    public void txture(Rect rect, Color colour, float opacity = 1)
    {
        colour.a = opacity;
        // ensure that we only call Apply() once by reading the colour of the pixel
        // at 0,0 and seeing if it is the same as 'colour'
        if (t1.GetPixel(0, 0) != colour)
        {
            Debug.Log("still being overriden");
            t1.SetPixel(0, 0, colour);
            t1.Apply();
        }
        Graphics.DrawTexture(rect, t1);
    }
}
TestDrawTexture script:
public class TestDrawTexture : MonoBehaviour
{
    DrawPixelTexture gray;
    DrawPixelTexture blue;
    bool firstRun = true; // must start as true, otherwise the instances are never created

    void OnGUI()
    {
        if (firstRun)
        {
            gray = new DrawPixelTexture();
            blue = new DrawPixelTexture();
            firstRun = false;
        }
        gray.txture(new Rect(0, 0, 100, 20), Color.gray);
        blue.txture(new Rect(0, 0, 100, 20), Color.blue);
    }
}
I suggest creating a cache that stores the texture for each colour you have already used.
using System.Collections.Generic;
using UnityEngine;

public class DrawPixelTexture
{
    private static readonly Dictionary<Color, Texture2D> texturesCache = new Dictionary<Color, Texture2D>();

    public static void ClearCache()
    {
        texturesCache.Clear();
    }

    public static void Texture(Rect rect, Color colour, float opacity = 1)
    {
        colour.a = opacity;
        Texture2D texture;
        if (!texturesCache.TryGetValue(colour, out texture))
        {
            Debug.Log("still being created");
            texture = new Texture2D(1, 1, TextureFormat.RGBA32, true);
            texture.hideFlags = HideFlags.HideAndDontSave;
            texture.SetPixel(0, 0, colour);
            texture.Apply();
            texturesCache.Add(colour, texture);
        }
        Graphics.DrawTexture(rect, texture);
    }
}
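Usage then stays a single static call per rect; with the cache, SetPixel/Apply run only the first time each colour appears:

public class TestDrawTexture : MonoBehaviour
{
    void OnGUI()
    {
        // Each colour gets its own cached 1x1 texture, so the two calls
        // no longer overwrite each other's pixel.
        DrawPixelTexture.Texture(new Rect(0, 0, 100, 20), Color.gray);
        DrawPixelTexture.Texture(new Rect(0, 30, 100, 20), Color.blue);
    }
}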

Rendering SurfaceTexture to Unity Texture2D

I asked similar questions earlier, but they weren't well clarified; now I'd like advice on what I'm doing wrong in my code.
What I'm trying to do is render a SurfaceTexture from an Android plugin into a Unity Texture2D.
Unity code:
public class AndroidHandler : MonoBehaviour
{
    [SerializeField]
    private RawImage _rawImage;
    private Texture2D _inputTexture;
    private AndroidJavaObject androidStreamerObj;
    private System.IntPtr _nativePtr;

    void Start()
    {
        _rawImage.material.SetTextureScale("_MainTex", new Vector2(-1, -1));
        InitAndroidStreamerObject();
    }

    private void InitAndroidStreamerObject()
    {
        androidStreamerObj = new AndroidJavaObject("makeitbetter.figazzz.com.vitamiousing7.AndroidStreamer");
        Int32 texPtr = androidStreamerObj.Call<Int32>("GetTexturePtr");
        Debug.Log("texture pointer? " + texPtr);
        Texture2D nativeTexture = Texture2D.CreateExternalTexture(128, 128, TextureFormat.RGBA32, false, false, new System.IntPtr(texPtr));
        _rawImage.texture = nativeTexture;
    }

    public void StartStream()
    {
        string streamLink = "rtmp://live.hkstv.hk.lxdns.com/live/hks"; //"rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
        androidStreamerObj.Call("LaunchStream", streamLink);
    }

    void Update()
    {
        androidStreamerObj.Call("DrawFrame");
    }
}
I'm asking my Android plugin to create openGLTexture and I'm using the pointer of the brand-new texture to allocate Texture2D in Unity.
Android plugin code:
public class AndroidStreamer {
private final int FLOAT_SIZE_BYTES = 4;
private final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private Activity _currActivity;
private VideoView _streamConnection;
private Surface _cachedSurface;
private SurfaceTexture _cachedSurfaceTexture;
private Boolean isNewFrame = false;
//open gl
private int texWidth = 128;
private int texHeight = 128;
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int glProgram;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private int unityTextureID = -1;
private int mTextureId = -1; //surface texture id
private int idFBO = -1;
private int idRBO = -1;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
public AndroidStreamer() {
Log.d("Unity", "AndroidStreamer was initialized");
_currActivity = UnityPlayer.currentActivity;
Vitamio.isInitialized(_currActivity);
_currActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
_streamConnection = new VideoView(_currActivity);
_currActivity.addContentView(_streamConnection, new FrameLayout.LayoutParams(100, 100));
}
});
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
initShaderProgram();
}
private void initShaderProgram()
{
Log.d("Unity", "initShaderProgram");
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
glProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(glProgram, vertexShader);
checkGlError("glAttachVertexShader");
GLES20.glAttachShader(glProgram, fragmentShader);
checkGlError("glAttachFragmentShader");
GLES20.glLinkProgram(glProgram);
checkGlError("glLinkProgram");
maPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
checkLocation(maPositionHandle, "aPosition");
maTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
checkLocation(maTextureHandle, "aTextureCoord");
muMVPMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uMVPMatrix");
checkLocation(muMVPMatrixHandle, "uVMPMatrix");
muSTMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uSTMatrix");
checkLocation(muSTMatrixHandle, "uSTMatrix");
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("Unity", "Could not compile shader " + shaderType + ":");
Log.e("Unity", GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private void checkLocation(int location, String label) {
if (location < 0) {
throw new RuntimeException("Unable to locate '" + label + "' in program");
}
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e("Unity", op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
private void checkFrameBufferStatus()
{
int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
checkGlError("glCheckFramebufferStatus");
switch (status)
{
case GLES20.GL_FRAMEBUFFER_COMPLETE:
Log.d("Unity", "complete");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
Log.e("Unity", "incomplete attachment");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
Log.e("Unity", "incomplete missing attachment");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS:
Log.e("Unity", "incomplete dimensions");
break;
case GLES20.GL_FRAMEBUFFER_UNSUPPORTED:
Log.e("Unity", "framebuffer unsupported");
break;
default : Log.d("Unity", "default");
}
}
private void initGLTexture()
{
Log.d("Unity", "initGLTexture");
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("glGenTextures initGLTexture");
mTextureId = textures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("glActiveTexture initGLTexture");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
checkGlError("glBindTexture initGLTexture");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
checkGlError("glTexParameterf initGLTexture");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
checkGlError("glTexParameterf initGLTexture");
}
public int GetTexturePtr()
{
Bitmap bitmap = Bitmap.createBitmap(texWidth, texHeight, Bitmap.Config.ARGB_8888);
for(int x = 0; x < texWidth; x++)
{
for (int y = 0; y < texHeight; y++)
{
bitmap.setPixel(x, y, Color.argb(155, 255, 50, 255));
}
}
Log.d("Unity", "Bitmap is: " + bitmap);
ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
bitmap.copyPixelsToBuffer(buffer);
//GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
//checkGlError("glEnable GetTexturePtr");
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("0");
unityTextureID = textures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("1");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, unityTextureID);
checkGlError("2");
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, texWidth, texHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
checkGlError("12");
//GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//checkGlError("3");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
checkGlError("4");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
checkGlError("5");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("6");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("7");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
checkGlError("8");
setupBuffers();
Log.d("Unity", "texture id returned: " + unityTextureID);
return unityTextureID;
}
private void setupBuffers()
{
Log.d("Unity", "setupBuffers");
//framebuffer
int buffers[] = new int[1];
GLES20.glGenFramebuffers(1, buffers, 0);
checkGlError("9");
idFBO = buffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
checkGlError("10");
//render buffer
int rbuffers[] = new int[1];
GLES20.glGenRenderbuffers(1, rbuffers, 0);
checkGlError("glGenRenderBuffers setupBuffers");
idRBO = rbuffers[0];
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, idRBO);
checkGlError("glBindRenderBuffer setupBuffers");
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_RGBA4, texWidth, texHeight);
checkGlError("glRenderBufferStorage setupBuffers");
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_RENDERBUFFER, idRBO);
checkGlError("glFramebufferRenderbuffer setupBuffers");
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, unityTextureID, 0);
checkGlError("glFrameBufferTexture2D");
checkFrameBufferStatus();
GLES20.glClearColor(1.0f, 0.5f, 0.0f, 1.0f);
checkGlError("glClearColor setupBuffers");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
checkGlError("glClear setupBuffers");
}
public void DrawFrame()
{
if(isNewFrame && mSTMatrix != null) {
int[] testBuffer = new int[1];
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, testBuffer, 0);
Log.d("Unity", "DrawFrame binded = " + testBuffer[0] + " idFBO = " + idFBO);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
checkGlError("glBindFrameBuffer DrawFrame");
GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
checkGlError("glClearColor DrawFrame");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
checkGlError("glClear DrawFrame");
GLES20.glUseProgram(glProgram);
checkGlError("glUseProgram DrawFrame");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("glActiveTexture DrawFrame");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
checkGlError("glBindTexture DrawFrame");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer DrawFrame");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray DrawFrame");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
checkGlError("glUniformMatrix4fv MVP onFrameAvailable");
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
checkGlError("glUniformMatrix4fv ST onFrameAvailable");
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays onFrameAvailable");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
checkGlError("glBindFrameBuffer 0 onFrameAvailable");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
checkGlError("glBindTexture onFrameAvailable");
isNewFrame = false;
}
}
public void LaunchStream(String streamLink) {
final String path = streamLink; //"http://dlqncdn.miaopai.com/stream/MVaux41A4lkuWloBbGUGaQ__.mp4"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
Log.i("Unity", "hop hop1 = " + path);
_currActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
_streamConnection.setVideoPath(path);
_streamConnection.setMediaController(new MediaController(_currActivity));
_streamConnection.requestFocus();
_streamConnection.setOnErrorListener(new MediaPlayer.OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.i("Unity", "some error, I don't know. what = " + what + " extra = " + extra);
return false;
}
});
_streamConnection.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mediaPlayer) {
// optional need Vitamio 4.0
Log.i("Unity", "hop hop5");
mediaPlayer.setPlaybackSpeed(1.0f);
}
});
initGLTexture();
_cachedSurfaceTexture = new SurfaceTexture(mTextureId);
_cachedSurfaceTexture.setDefaultBufferSize(texWidth, texHeight);
_cachedSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (this) {
surfaceTexture.updateTexImage();
mSTMatrix = new float[16];
surfaceTexture.getTransformMatrix(mSTMatrix);
isNewFrame = true;
}
}
});
_cachedSurface = new Surface(_cachedSurfaceTexture);
_streamConnection.setSurfaceToPlayer(_cachedSurface);
Log.i("Unity", "You're the best around!");
}
});
}
}
I decided to provide all the code of my Android plugin in order to give the clearest possible understanding of the situation. Basically, what I'm trying to do:
I call the method "GetTexturePtr" from the Unity side; it creates a GL_TEXTURE_2D texture whose pointer I apply to a Unity Texture2D. On the Android side I also set up frame and render buffers for changing the color of this texture. That part works fine, because the texture fills with color perfectly.
Then I call the method "LaunchStream", which creates a GL_TEXTURE_EXTERNAL_OES texture (in the "initGLTexture()" method); this texture is applied to the SurfaceTexture.
Also, in the Unity Update() method I call the Android method "DrawFrame()", which should update my Unity texture according to the SurfaceTexture changes.
Right now I'm getting glError 1282 (GL_INVALID_OPERATION) on GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId); and of course the texture just fills with green here:
GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
checkGlError("glClearColor DrawFrame");
What am I doing wrong?
Few people know this trick. I'd like to give you a brief outline, and I think you can figure out the rest:
1. First you need an ImageReader; it can accept the Surface that you want to read, and it has a callback, ImageReader.OnImageAvailableListener, which is called once the image is ready.
2. Use ImageReader.acquireLatestImage() to get an Image.
3. Use Image.getHardwareBuffer() to get a HardwareBuffer.
4. Pass the HardwareBuffer to your JNI function and update your texture:
// Target your texture
glBindTexture(GL_TEXTURE_2D, textureName);
// Get the native AHardwareBuffer
AHardwareBuffer *hwbuffer = AHardwareBuffer_fromHardwareBuffer(env, hardwareBuffer);
// Create an EGLClientBuffer from the AHardwareBuffer
EGLClientBuffer native_buffer = eglGetNativeClientBufferANDROID(hwbuffer);
// Destroy the last created EGLImageKHR
if (cachedImages.find(textureName) != cachedImages.end()) {
    eglDestroyImageKHR(eglGetCurrentDisplay(), cachedImages[textureName]);
}
// Begin to make a new EGLImageKHR
EGLImageKHR image {EGL_NO_IMAGE_KHR};
EGLint attrs[] = {
    EGL_IMAGE_PRESERVED_KHR,
    EGL_TRUE,
    EGL_NONE,
};
// Create the EGLImage from the EGLClientBuffer
image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, native_buffer, attrs);
if (image == EGL_NO_IMAGE_KHR) {
    LOGE("Failed to create EGLImage.");
    return false;
}
// Cache the image
cachedImages[textureName] = image;
// Look up glEGLImageTargetTexture2DOES
if (!isGlEGLImageTargetTexture2DOESInited) {
    glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) eglGetProcAddress("glEGLImageTargetTexture2DOES");
    isGlEGLImageTargetTexture2DOESInited = true;
}
if (glEGLImageTargetTexture2DOES == NULL) {
    LOGE("Error: Failed to find glEGLImageTargetTexture2DOES at %s:%i\n", __FILE__, __LINE__);
    return false;
}
// Allocate the OpenGL texture using the EGLImage
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
// Not GL_TEXTURE_EXTERNAL_OES:
//glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
glBindTexture(GL_TEXTURE_2D, 0);
Now you have an updated textureName, which is the texture you created earlier in your code (from native code, Android EGL, or Unity).
The whole process is:
1. The ImageReader's callback sets an image-ready flag.
2. Unity's Update() checks whether an image is ready.
3. Update the texture using the code above.
You can't call surfaceTexture.updateTexImage() in onFrameAvailable; call it in DrawFrame().
And in Unity3D:
void Update()
{
    androidStreamerObj.Call("DrawFrame");
    GL.InvalidateState(); // add this: the plugin's GL calls invalidate Unity's cached render state
}
I tried this one and it works: https://github.com/hywenbinger/android_unity_video
Just create a SurfaceTexture and attach a Unity material with the shader from that project. There's no need for native shaders and buffers.

How do I capture a screenshot in Unity3d with a transparent background?

I'm trying to capture a screenshot of a GameObject in Unity3D Pro so that it has a transparent background. This script was suggested to me, and it works when attached to the main camera, as long as the material doesn't have a texture; otherwise I get semi-transparency appearing on the GameObject, as shown in this example: http://sta.sh/0iwguk5rx61. Any help with this would be greatly appreciated.
public int resWidth = 2550;
public int resHeight = 3300;
private bool takeHiResShot = false;

public static string ScreenShotName(int width, int height)
{
    return string.Format("{0}/screen_{1}x{2}_{3}.png",
        Application.dataPath,
        width, height,
        System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
}

public void TakeHiResShot()
{
    takeHiResShot = true;
}

void LateUpdate()
{
    takeHiResShot |= Input.GetKeyDown("k");
    if (takeHiResShot)
    {
        RenderTexture rt = new RenderTexture(resWidth, resHeight, 24);
        camera.targetTexture = rt;
        Texture2D screenShot = new Texture2D(resWidth, resHeight, TextureFormat.ARGB32, false);
        camera.Render();
        RenderTexture.active = rt;
        screenShot.ReadPixels(new Rect(0, 0, resWidth, resHeight), 0, 0);
        camera.targetTexture = null;
        RenderTexture.active = null;
        Destroy(rt);
        byte[] bytes = screenShot.EncodeToPNG();
        string filename = ScreenShotName(resWidth, resHeight);
        System.IO.File.WriteAllBytes(filename, bytes);
        Debug.Log(string.Format("Took screenshot to: {0}", filename));
        Application.OpenURL(filename);
        takeHiResShot = false;
    }
}
See if this works for you.
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using UnityEngine;

public class Screenshot : MonoBehaviour
{
    private void Start()
    {
        string filename = string.Format("Assets/Screenshots/capture_{0}.png", DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss-fff"));
        if (!Directory.Exists("Assets/Screenshots"))
        {
            Directory.CreateDirectory("Assets/Screenshots");
        }
        TakeTransparentScreenshot(Camera.main, Screen.width, Screen.height, filename);
    }

    public static void TakeTransparentScreenshot(Camera cam, int width, int height, string savePath)
    {
        // Depending on your render pipeline, this may not work.
        var bak_cam_targetTexture = cam.targetTexture;
        var bak_cam_clearFlags = cam.clearFlags;
        var bak_RenderTexture_active = RenderTexture.active;
        var tex_transparent = new Texture2D(width, height, TextureFormat.ARGB32, false);
        // Must use a 24-bit depth buffer to be able to fill the background.
        var render_texture = RenderTexture.GetTemporary(width, height, 24, RenderTextureFormat.ARGB32);
        var grab_area = new Rect(0, 0, width, height);
        RenderTexture.active = render_texture;
        cam.targetTexture = render_texture;
        cam.clearFlags = CameraClearFlags.SolidColor;
        // Simple: use a clear background
        cam.backgroundColor = Color.clear;
        cam.Render();
        tex_transparent.ReadPixels(grab_area, 0, 0);
        tex_transparent.Apply();
        // Encode the resulting output texture to a byte array, then write it to the file
        byte[] pngShot = ImageConversion.EncodeToPNG(tex_transparent);
        File.WriteAllBytes(savePath, pngShot);
        // Restore the camera and render-texture state, then clean up.
        cam.clearFlags = bak_cam_clearFlags;
        cam.targetTexture = bak_cam_targetTexture;
        RenderTexture.active = bak_RenderTexture_active;
        RenderTexture.ReleaseTemporary(render_texture);
        Texture2D.Destroy(tex_transparent);
    }
}
You might have to refresh your assets folder (Ctrl+R) to make Screenshots folder appear in the inspector.
I am Colombian and my English is not good; I hope you understand me. I had the same problem and solved it by just changing the TextureFormat from ARGB32 to RGB24:
...
RenderTexture rt = new RenderTexture(resWidth, resHeight, 24);
camera.targetTexture = rt;
Texture2D screenShot = new Texture2D(resWidth, resHeight, TextureFormat.RGB24, false);
camera.Render();
RenderTexture.active = rt;
...
I hope this is helpful. (Note that RGB24 has no alpha channel, so this removes the semi-transparency artifacts by discarding alpha entirely; the saved background will no longer be transparent.)
see u, :D
