I asked similar questions earlier, but they weren't well clarified, so now I would like some advice on what I'm doing wrong in my code.
What I'm trying to do is render a SurfaceTexture from an Android plugin into a Unity Texture2D.
Unity code:
public class AndroidHandler : MonoBehaviour {
[SerializeField]
private RawImage _rawImage;
private Texture2D _inputTexture;
private AndroidJavaObject androidStreamerObj;
private System.IntPtr _nativePtr;
void Start () {
_rawImage.material.SetTextureScale("_MainTex", new Vector2(-1, -1));
InitAndroidStreamerObject();
}
private void InitAndroidStreamerObject()
{
androidStreamerObj = new AndroidJavaObject("makeitbetter.figazzz.com.vitamiousing7.AndroidStreamer");
Int32 texPtr = androidStreamerObj.Call<Int32>("GetTexturePtr");
Debug.Log("texture pointer? " + texPtr);
Texture2D nativeTexture = Texture2D.CreateExternalTexture(128, 128, TextureFormat.RGBA32, false, false, new System.IntPtr(texPtr));
_rawImage.texture = nativeTexture;
}
public void StartStream()
{
string streamLink = "rtmp://live.hkstv.hk.lxdns.com/live/hks"; //"rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
androidStreamerObj.Call("LaunchStream", streamLink);
}
void Update()
{
androidStreamerObj.Call("DrawFrame");
}
}
I'm asking my Android plugin to create an OpenGL texture, and I'm using the pointer of that brand-new texture to allocate the Texture2D in Unity.
Android plugin code:
public class AndroidStreamer {
private final int FLOAT_SIZE_BYTES = 4;
private final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
private final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
private final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
private Activity _currActivity;
private VideoView _streamConnection;
private Surface _cachedSurface;
private SurfaceTexture _cachedSurfaceTexture;
private Boolean isNewFrame = false;
//open gl
private int texWidth = 128;
private int texHeight = 128;
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
private int glProgram;
private int muMVPMatrixHandle;
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
private int unityTextureID = -1;
private int mTextureId = -1; //surface texture id
private int idFBO = -1;
private int idRBO = -1;
private final float[] mTriangleVerticesData = {
// X, Y, Z, U, V
-1.0f, -1.0f, 0, 0.f, 0.f,
1.0f, -1.0f, 0, 1.f, 0.f,
-1.0f, 1.0f, 0, 0.f, 1.f,
1.0f, 1.0f, 0, 1.f, 1.f,
};
private FloatBuffer mTriangleVertices;
private final String vertexShaderCode =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main() {\n" +
" gl_Position = uMVPMatrix * aPosition;\n" +
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
private final String fragmentShaderCode =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
"uniform samplerExternalOES sTexture;\n" +
"void main() {\n" +
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
public AndroidStreamer() {
Log.d("Unity", "AndroidStreamer was initialized");
_currActivity = UnityPlayer.currentActivity;
Vitamio.isInitialized(_currActivity);
_currActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
_streamConnection = new VideoView(_currActivity);
_currActivity.addContentView(_streamConnection, new FrameLayout.LayoutParams(100, 100));
}
});
mTriangleVertices = ByteBuffer.allocateDirect(
mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
initShaderProgram();
}
private void initShaderProgram()
{
Log.d("Unity", "initShaderProgram");
int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
glProgram = GLES20.glCreateProgram();
GLES20.glAttachShader(glProgram, vertexShader);
checkGlError("glAttachVertexShader");
GLES20.glAttachShader(glProgram, fragmentShader);
checkGlError("glAttachFragmentShader");
GLES20.glLinkProgram(glProgram);
checkGlError("glLinkProgram");
maPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
checkLocation(maPositionHandle, "aPosition");
maTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
checkLocation(maTextureHandle, "aTextureCoord");
muMVPMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uMVPMatrix");
checkLocation(muMVPMatrixHandle, "uMVPMatrix");
muSTMatrixHandle = GLES20.glGetUniformLocation(glProgram, "uSTMatrix");
checkLocation(muSTMatrixHandle, "uSTMatrix");
}
private int loadShader(int shaderType, String source) {
int shader = GLES20.glCreateShader(shaderType);
if (shader != 0) {
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled = new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
Log.e("Unity", "Could not compile shader " + shaderType + ":");
Log.e("Unity", GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader = 0;
}
}
return shader;
}
private void checkLocation(int location, String label) {
if (location < 0) {
throw new RuntimeException("Unable to locate '" + label + "' in program");
}
}
private void checkGlError(String op) {
int error;
while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
Log.e("Unity", op + ": glError " + error);
throw new RuntimeException(op + ": glError " + error);
}
}
private void checkFrameBufferStatus()
{
int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
checkGlError("glCheckFramebufferStatus");
switch (status)
{
case GLES20.GL_FRAMEBUFFER_COMPLETE:
Log.d("Unity", "complete");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
Log.e("Unity", "incomplete attachment");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
Log.e("Unity", "incomplete missing attachment");
break;
case GLES20.GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS:
Log.e("Unity", "incomplete dimensions");
break;
case GLES20.GL_FRAMEBUFFER_UNSUPPORTED:
Log.e("Unity", "framebuffer unsupported");
break;
default : Log.d("Unity", "default");
}
}
private void initGLTexture()
{
Log.d("Unity", "initGLTexture");
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("glGenTextures initGLTexture");
mTextureId = textures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("glActiveTexture initGLTexture");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
checkGlError("glBindTexture initGLTexture");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
checkGlError("glTexParameterf initGLTexture");
GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
checkGlError("glTexParameterf initGLTexture");
}
public int GetTexturePtr()
{
Bitmap bitmap = Bitmap.createBitmap(texWidth, texHeight, Bitmap.Config.ARGB_8888);
for(int x = 0; x < texWidth; x++)
{
for (int y = 0; y < texHeight; y++)
{
bitmap.setPixel(x, y, Color.argb(155, 255, 50, 255));
}
}
Log.d("Unity", "Bitmap is: " + bitmap);
ByteBuffer buffer = ByteBuffer.allocate(bitmap.getByteCount());
bitmap.copyPixelsToBuffer(buffer);
//GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
//checkGlError("glEnable GetTexturePtr");
int textures[] = new int[1];
GLES20.glGenTextures(1, textures, 0);
checkGlError("0");
unityTextureID = textures[0];
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("1");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, unityTextureID);
checkGlError("2");
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, texWidth, texHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
checkGlError("12");
//GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
//checkGlError("3");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
checkGlError("4");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
checkGlError("5");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("6");
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
checkGlError("7");
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
checkGlError("8");
setupBuffers();
Log.d("Unity", "texture id returned: " + unityTextureID);
return unityTextureID;
}
private void setupBuffers()
{
Log.d("Unity", "setupBuffers");
//framebuffer
int buffers[] = new int[1];
GLES20.glGenFramebuffers(1, buffers, 0);
checkGlError("9");
idFBO = buffers[0];
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
checkGlError("10");
//render buffer
int rbuffers[] = new int[1];
GLES20.glGenRenderbuffers(1, rbuffers, 0);
checkGlError("glGenRenderBuffers setupBuffers");
idRBO = rbuffers[0];
GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, idRBO);
checkGlError("glBindRenderBuffer setupBuffers");
GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_RGBA4, texWidth, texHeight);
checkGlError("glRenderBufferStorage setupBuffers");
GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_RENDERBUFFER, idRBO);
checkGlError("glFramebufferRenderbuffer setupBuffers");
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, unityTextureID, 0);
checkGlError("glFrameBufferTexture2D");
checkFrameBufferStatus();
GLES20.glClearColor(1.0f, 0.5f, 0.0f, 1.0f);
checkGlError("glClearColor setupBuffers");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
checkGlError("glClear setupBuffers");
}
public void DrawFrame()
{
if(isNewFrame && mSTMatrix != null) {
int[] testBuffer = new int[1];
GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, testBuffer, 0);
Log.d("Unity", "DrawFrame binded = " + testBuffer[0] + " idFBO = " + idFBO);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, idFBO);
checkGlError("glBindFrameBuffer DrawFrame");
GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
checkGlError("glClearColor DrawFrame");
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
checkGlError("glClear DrawFrame");
GLES20.glUseProgram(glProgram);
checkGlError("glUseProgram DrawFrame");
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
checkGlError("glActiveTexture DrawFrame");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
checkGlError("glBindTexture DrawFrame");
mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
checkGlError("glVertexAttribPointer DrawFrame");
GLES20.glEnableVertexAttribArray(maTextureHandle);
checkGlError("glEnableVertexAttribArray DrawFrame");
Matrix.setIdentityM(mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
checkGlError("glUniformMatrix4fv MVP onFrameAvailable");
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
checkGlError("glUniformMatrix4fv ST onFrameAvailable");
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
checkGlError("glDrawArrays onFrameAvailable");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
checkGlError("glBindFrameBuffer 0 onFrameAvailable");
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
checkGlError("glBindTexture onFrameAvailable");
isNewFrame = false;
}
}
public void LaunchStream(String streamLink) {
final String path = streamLink; //"http://dlqncdn.miaopai.com/stream/MVaux41A4lkuWloBbGUGaQ__.mp4"; //"rtmp://live.hkstv.hk.lxdns.com/live/hks";
Log.i("Unity", "hop hop1 = " + path);
_currActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
_streamConnection.setVideoPath(path);
_streamConnection.setMediaController(new MediaController(_currActivity));
_streamConnection.requestFocus();
_streamConnection.setOnErrorListener(new MediaPlayer.OnErrorListener() {
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
Log.i("Unity", "some error, I don't know. what = " + what + " extra = " + extra);
return false;
}
});
_streamConnection.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
@Override
public void onPrepared(MediaPlayer mediaPlayer) {
// optional need Vitamio 4.0
Log.i("Unity", "hop hop5");
mediaPlayer.setPlaybackSpeed(1.0f);
}
});
initGLTexture();
_cachedSurfaceTexture = new SurfaceTexture(mTextureId);
_cachedSurfaceTexture.setDefaultBufferSize(texWidth, texHeight);
_cachedSurfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (this) {
surfaceTexture.updateTexImage();
mSTMatrix = new float[16];
surfaceTexture.getTransformMatrix(mSTMatrix);
isNewFrame = true;
}
}
});
_cachedSurface = new Surface(_cachedSurfaceTexture);
_streamConnection.setSurfaceToPlayer(_cachedSurface);
Log.i("Unity", "You're the best around!");
}
});
}
}
I decided to provide all the code of my Android plugin in order to give the clearest picture of the situation. Basically, what I'm trying to do is:
I call the method "GetTexturePtr" from the Unity side; it creates a GL_TEXTURE_2D texture which I apply to the Unity Texture2D. On the Android side I also set up frame and render buffers for changing the color of this texture. That part works, because the texture fills with color just fine.
Then I call the method "LaunchStream", which creates a GL_TEXTURE_EXTERNAL_OES texture (in the "initGLTexture()" method) and applies it to the SurfaceTexture.
In the Unity Update() method I also call the Android method "DrawFrame()", which should update my Unity texture according to the SurfaceTexture changes.
Right now I'm getting glError 1282 (GL_INVALID_OPERATION) on GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId); and of course the texture just fills with the green color from here:
GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
checkGlError("glClearColor DrawFrame");
What am I doing wrong?
Few people know this trick.
I'd like to give you a brief outline, and I think you can figure out the rest:
First you need an ImageReader; it can accept the Surface that you want to read, and it has a callback, ImageReader.OnImageAvailableListener, that gets called once an image is ready.
Use ImageReader.acquireLatestImage() to get an Image
Use Image.getHardwareBuffer() to get a HardwareBuffer
Pass the HardwareBuffer to your JNI function and update your texture
//Target your texture
glBindTexture(GL_TEXTURE_2D, textureName);
// Get native AHardwareBuffer
AHardwareBuffer *hwbuffer = AHardwareBuffer_fromHardwareBuffer(env, hardwareBuffer);
// Create EGLClientBuffer from the AHardwareBuffer.
EGLClientBuffer native_buffer = eglGetNativeClientBufferANDROID(hwbuffer);
// Destroy last created EGLImageKHR
if (cachedImages.find(textureName) != cachedImages.end()){
eglDestroyImageKHR(eglGetCurrentDisplay(), cachedImages[textureName]);
}
// Begin to make new EGLImageKHR
EGLImageKHR image {EGL_NO_IMAGE_KHR};
EGLint attrs[] = {
EGL_IMAGE_PRESERVED_KHR,
EGL_TRUE,
EGL_NONE,
};
// Create EGLImage from EGLClientBuffer.
image = eglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, native_buffer, attrs);
if (image == EGL_NO_IMAGE_KHR) {
LOGE("Failed to create EGLImage.");
return false;
}
// Cache the image
cachedImages[textureName] = image;
// Get glEGLImageTargetTexture2DOES
if (!isGlEGLImageTargetTexture2DOESInited) {
glEGLImageTargetTexture2DOES = (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC) eglGetProcAddress("glEGLImageTargetTexture2DOES");
isGlEGLImageTargetTexture2DOESInited = true;
}
if(glEGLImageTargetTexture2DOES == NULL){
LOGE("Error: Failed to find glEGLImageTargetTexture2DOES at %s:%in", __FILE__, __LINE__);
return false;
}
// Allocate the OpenGL texture using the EGLImage.
glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, image);
//Not GL_TEXTURE_EXTERNAL_OES
//glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
glBindTexture(GL_TEXTURE_2D, 0);
Now you have an updated textureName, which is the texture you created in your code before (from native code, Android EGL, or Unity).
The whole process is:
ImageReader's callback sets an image-ready flag,
Unity's Update() checks whether an image is ready,
then the texture is updated using the code above.
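A minimal Unity-side sketch of that polling loop (the plugin class name and its HasNewImage/UpdateTexture methods are hypothetical placeholders, not a real API):
using UnityEngine;

public class HardwareBufferPoller : MonoBehaviour
{
    private AndroidJavaObject _plugin; // wraps the ImageReader + JNI code above

    void Start()
    {
        _plugin = new AndroidJavaObject("com.example.VideoPlugin"); // hypothetical class name
    }

    void Update()
    {
        // Poll the image-ready flag that ImageReader's OnImageAvailableListener sets.
        if (_plugin.Call<bool>("HasNewImage")) // hypothetical method
        {
            _plugin.Call("UpdateTexture"); // hypothetical method running the EGLImage code above
        }
    }
}
Note that in a real plugin the GL calls above have to run on Unity's render thread (for example via GL.IssuePluginEvent), since script Update() runs on the main thread.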
You can't call surfaceTexture.updateTexImage(); in onFrameAvailable; call it in DrawFrame() instead.
And in Unity3D:
void Update()
{
androidStreamerObj.Call("DrawFrame");
GL.InvalidateState(); // ADD it
}
I tried this one and it works: https://github.com/hywenbinger/android_unity_video
Just create a SurfaceTexture, and attach a Unity material with the shader from that project. There's no need for native shaders and buffers.
Related
I'm using Unity 2018. In my project I have to capture a particular area of the screen. I am using the code below. It works, but it does not capture the exact area; it is off by some amount. How can I capture the exact image?
using UnityEngine;
using System.Collections;
using System;
public class ScreenCapture : MonoBehaviour
{
public RenderTexture overviewTexture;
GameObject OVcamera;
public string path = "";
void Start()
{
OVcamera = GameObject.FindGameObjectWithTag("OverviewCamera");
}
void LateUpdate()
{
if (Input.GetKeyDown("f9"))
{
StartCoroutine(TakeScreenShot());
}
}
// return file name
string fileName(int width, int height)
{
return string.Format("screen_{0}x{1}_{2}.png",
width, height,
System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
}
public IEnumerator TakeScreenShot()
{
yield return new WaitForEndOfFrame();
Camera camOV = OVcamera.GetComponent<Camera>(); // the .camera shortcut property was removed in Unity 5+
RenderTexture currentRT = RenderTexture.active;
RenderTexture.active = camOV.targetTexture;
camOV.Render();
Texture2D imageOverview = new Texture2D(camOV.targetTexture.width, camOV.targetTexture.height,
TextureFormat.RGB24, false);
imageOverview.ReadPixels(new Rect(0, 0, camOV.targetTexture.width, camOV.targetTexture.height), 0,
0);
imageOverview.Apply();
RenderTexture.active = currentRT;
byte[] bytes = imageOverview.EncodeToPNG();
// save in memory
string filename = fileName(Convert.ToInt32(imageOverview.width),
Convert.ToInt32(imageOverview.height));
path = Application.persistentDataPath + "/Snapshots/" + filename;
System.IO.File.WriteAllBytes(path, bytes);
}
}
That is my code above.
use this:
Texture2D screencap;
Texture2D border;
bool shot=false;
public string path;
void Start () {
screencap=new Texture2D(300,200,TextureFormat.RGB24,false);
border=new Texture2D(2,2,TextureFormat.ARGB32,false);
border.Apply();
}
// Update is called once per frame
void Update () {
if(Input.GetKeyUp(KeyCode.Mouse0))
{
StartCoroutine("Capture");
}
}
string fileName(int width, int height)
{
return string.Format("screen_{0}x{1}_{2}.png",
width, height,
System.DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
}
void OnGUI()
{
GUI.DrawTexture(new Rect(200,100,300,2),border,ScaleMode.StretchToFill);
GUI.DrawTexture(new Rect(200,300,300,2),border,ScaleMode.StretchToFill);
GUI.DrawTexture(new Rect(195,100,2,200),border,ScaleMode.StretchToFill);
GUI.DrawTexture(new Rect(500,100,2,201),border,ScaleMode.StretchToFill);
if(shot)
{
GUI.DrawTexture(new Rect(50,10,60,40),screencap,ScaleMode.StretchToFill);
//Application.CaptureScreenshot(myFolderLocation+myFilename);
}
}
IEnumerator Capture()
{
yield return new WaitForEndOfFrame();
screencap.ReadPixels(new Rect(198,98,298,198),0,0);
screencap.Apply();
shot=true;
byte[] bytes=screencap.EncodeToPNG(); // encode the cropped capture, not the 2x2 border texture
string filename=fileName(Convert.ToInt32(screencap.width), Convert.ToInt32(screencap.height));
System.IO.File.WriteAllBytes("D:/"+filename, bytes); // save the cropped PNG (CaptureScreenshot would grab the whole screen)
}
This script takes a customised screenshot of any object that has a RectTransform component attached to it.
using System.Collections;
using UnityEngine;
using UnityEngine.UI;
public class TakeScreenshotAndSave : MonoBehaviour
{
//Object To Screenshot
[SerializeField] private RectTransform _objToScreenshot;
//Assign the button to take screenshot on clicking
[SerializeField] private Button _takeScreenshotButton;
void Start()
{
_takeScreenshotButton.onClick.AddListener(OnClickTakeScreenshotAndSaveButton);
}
private void OnClickTakeScreenshotAndSaveButton()
{
StartCoroutine(TakeSnapShotAndSave());
}
//Using a Coroutine instead of normal method
public IEnumerator TakeSnapShotAndSave()
{
//Code will throw error at runtime if this is removed
yield return new WaitForEndOfFrame();
//Get the corners of RectTransform rect and store it in a array vector
Vector3[] corners = new Vector3[4];
_objToScreenshot.GetWorldCorners(corners);
//Remove 100 and you will get error
int width = ((int)corners[3].x - (int)corners[0].x) - 100;
int height = (int)corners[1].y - (int)corners[0].y;
var startX = corners[0].x;
var startY = corners[0].y;
//Make a temporary texture and read pixels from it
Texture2D ss = new Texture2D(width, height, TextureFormat.RGB24, false);
ss.ReadPixels(new Rect(startX, startY, width, height), 0, 0);
ss.Apply();
Debug.Log("Start X : " + startX + " Start Y : " + startY);
Debug.Log("Screen Width : " + Screen.width + " Screen Height : " +
Screen.height);
Debug.Log("Texture Width : " + width + " Texture Height : " + height);
//Save the screenshot to disk
byte[] byteArray = ss.EncodeToPNG();
string savePath = Application.persistentDataPath + "/ScreenshotSave.png";
System.IO.File.WriteAllBytes(savePath, byteArray);
Debug.Log("Screenshot Path : " + savePath);
// Destroy texture to avoid memory leaks
Destroy(ss);
}
}
This code, which I found here, creates a spectrogram of a given file, but it keeps me waiting while the file plays as the spectrogram is drawn.
I need to modify this code to create the spectrogram at once, without playing the file.
Thanks in advance.
public partial class Form1 : Form
{
private int _handle;
private int _pos;
private BASSTimer _timer;
private Visuals _visuals;
public Form1()
{
InitializeComponent();
}
private void timer_Tick(object sender, EventArgs e)
{
bool spectrum3DVoicePrint = _visuals.CreateSpectrum3DVoicePrint(_handle, pictureBox1.CreateGraphics(),
pictureBox1.Bounds, Color.Cyan, Color.Green,
_pos, false, true);
_pos++;
if (_pos >= pictureBox1.Width)
{
_pos = 0;
}
}
private void Form1_Load(object sender, EventArgs e)
{
string file = "..\\..\\mysong.mp3";
if (Bass.BASS_Init(-1, 44100, BASSInit.BASS_DEVICE_DEFAULT, Handle))
{
_handle = Bass.BASS_StreamCreateFile(file, 0, 0, BASSFlag.BASS_DEFAULT);
if (Bass.BASS_ChannelPlay(_handle, false))
{
_visuals = new Visuals();
_timer = new BASSTimer((int) (1.0d/10*1000));
_timer.Tick += timer_Tick;
_timer.Start();
}
}
}
}
Here's how I solved my problem.
The idea is that you don't have to listen to the whole recording; you can move the audio cursor and evaluate the spectrum at certain points.
private Bitmap DrawSpectrogram(string fileName, int height, int stepsPerSecond)
{
Bass.BASS_Init(-1, 44100, BASSInit.BASS_DEVICE_DEFAULT, Handle);
int channel = Bass.BASS_StreamCreateFile(fileName, 0, 0, BASSFlag.BASS_DEFAULT);
long len = Bass.BASS_ChannelGetLength(channel, BASSMode.BASS_POS_BYTES); // the length in bytes
double time = Bass.BASS_ChannelBytes2Seconds(channel, len); // the length in seconds
int steps = (int)Math.Floor(stepsPerSecond * time);
Bitmap result = new Bitmap(steps, height);
Graphics g = Graphics.FromImage(result);
Visuals visuals = new Visuals();
Bass.BASS_ChannelPlay(channel, false);
for (int i = 0; i < steps; i++)
{
Bass.BASS_ChannelSetPosition(channel, 1.0 * i / stepsPerSecond);
visuals.CreateSpectrum3DVoicePrint(channel, g, new Rectangle(0, 0, result.Width, result.Height), Color.Black, Color.White, i, true, false);
}
Bass.BASS_ChannelStop(channel);
Bass.BASS_Stop();
Bass.BASS_Free();
return result;
}
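A usage sketch (the file name and step settings are placeholders):
Bitmap spectrogram = DrawSpectrogram("..\\..\\mysong.mp3", 256, 10); // 256 px tall, 10 columns per second of audio
spectrogram.Save("spectrogram.png", System.Drawing.Imaging.ImageFormat.Png);
spectrogram.Dispose();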
I've been working on this project for a few months now, trying to integrate eye-tracking into Unity using OpenCVSharp. I've managed to get everything working, including the actual tracking of the pupil etc., however I've got a memory leak. Basically, after 20-30 seconds of the program running, it freezes and the console shows errors saying "Unable to allocate (insert number here) bits". Watching the memory usage while the program runs, you can see it steadily climb until it maxes out, and then the program crashes.
Now I've spent quite a while trying to fix the issue, and I've read a lot of help posts about releasing images/storage etc. correctly. Despite doing this, the memory doesn't appear to be released correctly. I tried using the garbage collector to force it to reclaim the memory, but that didn't seem to work either. Am I doing something fundamentally wrong with the images and how I reclaim them? Or is creating new images each frame (even though I'm releasing them) causing the problem?
Any help would be greatly appreciated. Here's the code below; you can ignore a lot of the stuff within the Update function, as it's to do with the actual tracking and calibration. I realise the code is pretty messy, sorry about that! The main section to worry about is EyeDetection().
using UnityEngine;
using System.Collections;
using System;
using System.IO;
using OpenCvSharp;
using OpenCvSharp.Blob;
//using System.Xml;
//using System.Threading;
//using AForge;
//using OpenCvSharp.Extensions;
//using System.Windows.Media;
//using System.Windows.Media.Imaging;
public class CaptureScript2 : MonoBehaviour
{
//public GameObject planeObj;
public WebCamTexture webcamTexture; //Texture retrieved from the webcam
//public Texture2D texImage; //Texture to apply to plane
public string deviceName;
private int devId = 1;
private int imWidth = 800; //camera width
private int imHeight = 600; //camera height
private string errorMsg = "No errors found!";
private static IplImage camImage; //Ipl image of the converted webcam texture
//private static IplImage yuv;
//private static IplImage dst;
private CvCapture cap; //Current camera capture
//private IplImage eyeLeft;
//private IplImage eyeRight;
//private IplImage eyeLeftFinal;
//private IplImage eyeRightFinal;
private double leftEyeX;
private double leftEyeY;
private double rightEyeX;
private double rightEyeY;
private int calibState;
private double LTRCPx;
private double LTLCPx;
private double LBLCPy;
private double LTLCPy;
private double RTRCPx;
private double RTLCPx;
private double RBLCPy;
private double RTLCPy;
private double gazeWidth;
private double gazeHeight;
private double gazeScaleX;
private double gazeScaleY;
public static CvMemStorage storageFace;
public static CvMemStorage storage;
public static double gazePosX;
public static double gazePosY;
private bool printed = true;
//private CvRect r;
//private IplImage smallImg;
CvColor[] colors = new CvColor[]
{
new CvColor(0,0,255),
new CvColor(0,128,255),
new CvColor(0,255,255),
new CvColor(0,255,0),
new CvColor(255,128,0),
new CvColor(255,255,0),
new CvColor(255,0,0),
new CvColor(255,0,255),
};
//scale for small image
const double Scale = 1.25;
const double scaleEye = 10.0;
const double ScaleFactor = 2.5;
//must show 2 eyes on the screen
const int MinNeighbors = 2;
const int MinNeighborsFace = 1;
// Use this for initialization
void Start ()
{
//Webcam initialisation
WebCamDevice[] devices = WebCamTexture.devices;
Debug.Log ("num:" + devices.Length);
for (int i=0; i<devices.Length; i++)
{
print (devices [i].name);
if (devices [i].name.CompareTo (deviceName) == 0) // CompareTo returns 0 on an exact match
{
devId = i;
}
}
if (devId >= 0)
{
//mainImage = new IplImage (imWidth, imHeight, BitDepth.U8, 3);
}
//create capture from current device
cap = Cv.CreateCameraCapture(devId);
//set properties of the capture
Cv.SetCaptureProperty(cap, CaptureProperty.FrameWidth, imWidth);
Cv.SetCaptureProperty(cap, CaptureProperty.FrameHeight, imHeight);
//create window to display capture
//Cv.NamedWindow("Eye tracking", WindowMode.AutoSize);
Cv.NamedWindow ("EyeLeft", WindowMode.AutoSize);
Cv.NamedWindow ("EyeRight", WindowMode.AutoSize);
Cv.NamedWindow ("Face", WindowMode.AutoSize);
calibState = 1;
}
void Update ()
{
if(Input.GetKeyDown(KeyCode.Space) && calibState < 3)
{
calibState++;
}
if(Input.GetMouseButtonDown(0) && calibState < 4)
{
printed = false;
calibState++;
Cv.DestroyAllWindows();
Cv.ReleaseCapture(cap);
cap = Cv.CreateCameraCapture(devId);
}
//if device is connected
if (devId >= 0)
{
//cap = Cv.CreateCameraCapture(devId);
//Cv.Release
//retrieve the current frame from camera
camImage = Cv.QueryFrame(cap);
//detect eyes and apply circles
//
EyeDetection();
Cv.ReleaseImage(camImage);
//PupilTracking();
switch(calibState)
{
case 1:
LTRCPx = leftEyeX;
RTRCPx = rightEyeX;
break;
case 2:
LTLCPx = leftEyeX;
LTLCPy = leftEyeY;
RTLCPx = rightEyeX;
RTLCPy = rightEyeY;
break;
case 3:
LBLCPy = leftEyeY;// + rightEyeY) /2 ;
RBLCPy = rightEyeY;
break;
case 4:
//gazeWidth = (((LTRCPx - LTLCPx) + (RTRCPx - RTLCPx)) / 2) * -1;
//gazeHeight = ((LBLCPy - LTLCPy) + (RBLCPy - RTLCPy)) /2;
gazeWidth = LTLCPx -LTRCPx;
gazeHeight = LBLCPy - LTLCPy;
gazeScaleX = (Screen.width/gazeWidth);
gazeScaleY = Screen.height/gazeHeight;
gazePosX = gazeScaleX *(leftEyeX - LTRCPx);
gazePosY = gazeScaleY *(leftEyeY - LTLCPy);
break;
}
//Cv.ReleaseCapture(cap);
}
else
{
Debug.Log ("Can't find camera!");
}
//print (calibState);
if(printed == false)
{
print ("Gaze pos x = " + gazePosX);
print ("Gaze pos Y = " + gazePosY);
print ("Scale x = " + gazeScaleX);
print ("Scale y = " + gazeScaleY);
print ("Gaze width = " + gazeWidth);
print ("Gaze Height = " + gazeHeight);
print ("left eye x = " + leftEyeX);
print ("left eye Y = " + leftEyeY);
print ("calib state = " + calibState);
printed = true;
}
//Cv.ShowImage("Eye tracking", mainImage);
//Cv.ShowImage ("EyeLeft", grayEyeLeft);
//Cv.ShowImage ("EyeRight", grayEyeRight);
}
void EyeDetection()
{
IplImage mainImage = new IplImage (imWidth, imHeight, BitDepth.U8, 3);
IplImage smallImg = new IplImage(mainImage.Width, mainImage.Height ,BitDepth.U8, 1);
Cv.Resize (camImage, mainImage, Interpolation.Linear);
IplImage gray = new IplImage(mainImage.Size, BitDepth.U8, 1);
Cv.CvtColor (mainImage, gray, ColorConversion.BgrToGray);
Cv.Resize(gray, smallImg, Interpolation.Linear);
Cv.EqualizeHist(smallImg, smallImg);
Cv.ReleaseImage (gray);
//IplImage hack = Cv.LoadImage("\\Users\\User\\Desktop\\Honours Projects\\Project10\\Project\\Assets\\bug.jpeg");
//Cv.Erode (hack, hack);
//Cv.ReleaseImage (hack);
//uint sizeStore = 2877212;
CvHaarClassifierCascade cascadeFace = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml");
CvMemStorage storageFace = new CvMemStorage();
storageFace.Clear ();
CvSeq<CvAvgComp> faces = Cv.HaarDetectObjects(smallImg, cascadeFace, storageFace, ScaleFactor, MinNeighborsFace, 0, new CvSize(30,30));
for(int j = 0; j < faces.Total; j++)
{
CvRect face = faces[j].Value.Rect;
CvHaarClassifierCascade cascadeEye = CvHaarClassifierCascade.FromFile ("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_eye.xml");
IplImage faceImg = new IplImage(face.Width, face.Height, BitDepth.U8, 1);
IplImage faceImgColour = new IplImage(face.Width, face.Height, BitDepth.U8, 3);
CvMemStorage storage = new CvMemStorage();
storage.Clear ();
Cv.SetImageROI(smallImg, face);
Cv.Copy (smallImg, faceImg);
Cv.ResetImageROI(smallImg);
Cv.SetImageROI(mainImage, face);
Cv.Copy (mainImage, faceImgColour);
Cv.ResetImageROI(mainImage);
Cv.ShowImage ("Face", faceImgColour);
CvSeq<CvAvgComp> eyes = Cv.HaarDetectObjects(faceImg, cascadeEye, storage, ScaleFactor, MinNeighbors, 0, new CvSize(30, 30));
for(int i = 0; i < eyes.Total; i++)
{
CvRect r = eyes[i].Value.Rect;
Cv.SetImageROI(faceImgColour, r);
if(i == 1)
{
IplImage eyeLeft = new IplImage(new CvSize(r.Width, r.Height), BitDepth.U8, 3);
Cv.Copy(faceImgColour, eyeLeft);
IplImage yuv = new IplImage(eyeLeft.Size, BitDepth.U8, 3);
IplImage dst = new IplImage(eyeLeft.Size, BitDepth.U8, 3);
IplImage grayEyeLeft = new IplImage(eyeLeft.Size, BitDepth.U8, 1);
IplImage eyeLeftFinal = new IplImage(Cv.Round(grayEyeLeft.Width * scaleEye), Cv.Round(grayEyeLeft.Height * scaleEye), BitDepth.U8, 1);
Cv.CvtColor(eyeLeft, yuv, ColorConversion.BgrToCrCb);
Cv.Not(yuv, dst);
Cv.CvtColor(dst,eyeLeft,ColorConversion.CrCbToBgr);
Cv.CvtColor(eyeLeft, grayEyeLeft, ColorConversion.BgrToGray);
Cv.Resize (grayEyeLeft, eyeLeftFinal, Interpolation.Linear);
Cv.Threshold(eyeLeftFinal, eyeLeftFinal, 230, 230, ThresholdType.Binary);
CvBlobs b1 = new CvBlobs(eyeLeftFinal);
if(b1.Count > 0)
{
leftEyeX = b1.LargestBlob().Centroid.X;
leftEyeY = b1.LargestBlob().Centroid.Y;
}
Cv.ShowImage ("EyeLeft", eyeLeftFinal);
Cv.ReleaseImage (yuv);
Cv.ReleaseImage (dst);
Cv.ReleaseImage (grayEyeLeft);
Cv.ReleaseImage (eyeLeftFinal);
b1.Clear();
Cv.ReleaseImage (eyeLeft);
}
if(i == 0)
{
IplImage eyeRight = new IplImage(new CvSize(r.Width, r.Height), BitDepth.U8, 3);
Cv.Copy(faceImgColour, eyeRight);
IplImage yuv2 = new IplImage(eyeRight.Size, BitDepth.U8, 3);
IplImage dst2 = new IplImage(eyeRight.Size, BitDepth.U8, 3);
IplImage grayEyeRight = new IplImage(eyeRight.Size, BitDepth.U8, 1);
IplImage eyeRightFinal = new IplImage(Cv.Round(grayEyeRight.Width * scaleEye), Cv.Round(grayEyeRight.Height * scaleEye), BitDepth.U8, 1);
Cv.CvtColor(eyeRight, yuv2, ColorConversion.BgrToCrCb);
Cv.Not(yuv2, dst2);
Cv.CvtColor(dst2,eyeRight,ColorConversion.CrCbToBgr);
Cv.CvtColor(eyeRight, grayEyeRight, ColorConversion.BgrToGray);
Cv.Resize (grayEyeRight, eyeRightFinal, Interpolation.Linear);
Cv.Threshold(eyeRightFinal, eyeRightFinal, 230, 230, ThresholdType.Binary);
CvBlobs b2 = new CvBlobs(eyeRightFinal);
if(b2.Count > 0)
{
rightEyeX = b2.LargestBlob().Centroid.X;
rightEyeY = b2.LargestBlob().Centroid.Y;
}
Cv.ShowImage ("EyeRight", eyeRightFinal);
Cv.ReleaseImage (yuv2);
Cv.ReleaseImage (dst2);
Cv.ReleaseImage (grayEyeRight);
Cv.ReleaseImage (eyeRightFinal);
b2.Clear ();
Cv.ReleaseImage (eyeRight);
}
Cv.ResetImageROI(faceImgColour);
}
//Cv.ShowImage("Eye tracking", mainImage);
Cv.ReleaseImage (faceImg);
Cv.ReleaseImage (faceImgColour);
Cv.ReleaseMemStorage(storage);
Cv.ReleaseHaarClassifierCascade(cascadeEye);
}
Cv.ReleaseMemStorage(storageFace);
Cv.ReleaseHaarClassifierCascade(cascadeFace);
//PupilTracking ();
Cv.ReleaseImage(smallImg);
Cv.ReleaseImage (mainImage);
GC.Collect();
}
void OnGUI ()
{
GUI.Label (new Rect (200, 200, 100, 90), errorMsg);
}
void OnDestroy()
{
Cv.DestroyAllWindows();
Cv.ReleaseCapture(cap);
}
I am not familiar with OpenCV, but as a general rule:
I would limit instantiation in the Update loop, like new CvMemStorage()
Don't load data in the Update loop: CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml"); That should be loaded once on start and assigned to a class variable.
Allocate on start and Release only if needed.
I find that in most situations there's plenty of RAM to go around. I allocate on Start() what is going to be used over and over, especially 60 times per second in the Update() loop!
But loading XML data and allocating and releasing variables like storage or cascadeEye is bound to create issues when the app is trying to do so 60 times a second.
Creating and destroying objects is very, very expensive. So do so wisely and sparingly, especially when dealing with complex data structures like the OpenCV objects, bitmaps, or loaders.
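A rough sketch of that restructuring, reusing only the OpenCvSharp calls already in the question (load once in Start, clear and reuse every frame):
CvHaarClassifierCascade cascadeFace;
CvHaarClassifierCascade cascadeEye;
CvMemStorage storageFace;
CvMemStorage storage;
void Start()
{
    // Load the cascade XML files once, not on every frame
    cascadeFace = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_frontalface_alt2.xml");
    cascadeEye = CvHaarClassifierCascade.FromFile("\\Users\\User\\Documents\\opencv\\sources\\data\\haarcascades\\haarcascade_eye.xml");
    storageFace = new CvMemStorage();
    storage = new CvMemStorage();
}
void EyeDetection()
{
    // Reuse the cached storages: Clear() instead of new CvMemStorage() per frame
    storageFace.Clear();
    storage.Clear();
    // ... then Cv.HaarDetectObjects(smallImg, cascadeFace, storageFace, ...) as before ...
}
void OnDestroy()
{
    // Release once, on shutdown, instead of every frame
    Cv.ReleaseMemStorage(storage);
    Cv.ReleaseMemStorage(storageFace);
    Cv.ReleaseHaarClassifierCascade(cascadeEye);
    Cv.ReleaseHaarClassifierCascade(cascadeFace);
}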
hth.
So I've been playing around with SlimDX for quite a while now,
and experienced some issues with big STL files.
While in OpenGL they load without flinching, in SlimDX I get down to 1-2 FPS as soon as I load files of about 100 MB (same issue with multiple files). Did I miss anything, or is there anything I'm simply not doing right at all?
Edit: Just to clarify my question: is performance with SlimDX on 1,000,000+ vertices always this poor?
Edit: I know that using an index buffer would be more efficient, and I know that CullMode.None isn't exactly an FPS saver, but in the OpenGL test I even used two-sided lighting and a bit of smoothing, which should be roughly as expensive as creating (in the worst case) 3 times as many points as necessary.
Edit: Out of curiosity I modified the code to include an index buffer, and it really did have some impact on the FPS; I am validating this right now.
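A minimal sketch of that index-buffer variant (my assumption: the vertex buffer then holds each unique mesh point once, so the triangles' Idx1/Idx2/Idx3 can be used directly as indices):
// Build the index buffer once in LoadContent(), next to the vertex buffer
int[] indices = new int[meshf.TriangleCount * 3];
for (int x = 0; x < meshf.TriangleCount; x++)
{
    var triangle = meshf.GetTriangle(x);
    indices[x * 3 + 0] = triangle.Idx1;
    indices[x * 3 + 1] = triangle.Idx2;
    indices[x * 3 + 2] = triangle.Idx3;
}
var indexStream = new DataStream(indices.Length * sizeof(int), true, true);
indexStream.WriteRange(indices);
indexStream.Position = 0;
var indexBuffer = new Buffer(Device, indexStream, indices.Length * sizeof(int),
    ResourceUsage.Default, BindFlags.IndexBuffer, CpuAccessFlags.None,
    ResourceOptionFlags.None, 0);
indexStream.Close();
// In Draw(), bind it and use DrawIndexed instead of Draw
DeviceContext.InputAssembler.SetIndexBuffer(indexBuffer, SlimDX.DXGI.Format.R32_UInt, 0);
DeviceContext.InputAssembler.SetVertexBuffers(0, vertexBufferBinding_model);
DeviceContext.DrawIndexed(indices.Length, 0, 0);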
BasicFramework.cs
#region Using Statements
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Text;
using SlimDX;
using SlimDX.Direct3D11;
using SlimDX.DXGI;
using SlimDX.Windows;
using Device = SlimDX.Direct3D11.Device;
using Texture2D = SlimDX.Direct3D11.Texture2D;
#endregion
namespace SlimDX_Evaluation
{
public abstract class BasicFramework : IDisposable
{
#region Membervariables
//Objects
private RenderForm renderForm;
private SwapChain swapChain;
private Factory factory;
private Device device;
private DeviceContext deviceContext;
private Texture2D backBufffer;
private Texture2D depthBuffer;
private RenderTargetView renderTargetView;
private DepthStencilView depthStencilView;
private TimeSpan lastFrameTime;
private Stopwatch clock;
//Variables
private bool userResized;
private bool isResizing;
#endregion
#region Constructors
/**
* The Constructor initializes the default behavior of the Framework.
* It is not supposed to be replaced, the customization should be done in the Constructor
*/
public BasicFramework() : this("My Title") { }
public BasicFramework(string title)
{
//Create the winForm
renderForm = new RenderForm(title);
renderForm.ClientSize = new System.Drawing.Size(800, 480);
renderForm.MaximizeBox = true;
renderForm.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Sizable;
//Hook into Windows.Forms Event
renderForm.ClientSizeChanged += HandleClientSizeChanged;
//Generate SwapChain
var desc = new SwapChainDescription()
{
BufferCount = 1,
ModeDescription = new ModeDescription(renderForm.ClientSize.Width,
renderForm.ClientSize.Height,
new Rational(60, 1),
Format.B8G8R8A8_UNorm),
IsWindowed = true,
OutputHandle = renderForm.Handle,
SampleDescription = new SampleDescription(1, 0),
SwapEffect = SwapEffect.Discard,
Usage = Usage.RenderTargetOutput,
};
Device.CreateWithSwapChain(
DriverType.Hardware,
DeviceCreationFlags.None,
desc,
out device,
out swapChain
);
//Set DeviceContext
deviceContext = device.ImmediateContext;
// prevent DXGI handling of alt+enter,prt scrn, etc which doesn't work properly with Winforms
using (var factory = swapChain.GetParent<Factory>())
factory.SetWindowAssociation(renderForm.Handle, WindowAssociationFlags.IgnoreAll);
//Generate Backbuffer
backBufffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
renderTargetView = new RenderTargetView(device, backBufffer);
//Generate Depthbuffer and DepthBufferView
depthBuffer = new Texture2D(device, new Texture2DDescription()
{
Format = Format.D16_UNorm,
ArraySize = 1,
MipLevels = 1,
Width = renderForm.ClientSize.Width,
Height = renderForm.ClientSize.Height,
SampleDescription = new SampleDescription(1, 0),
Usage = ResourceUsage.Default,
BindFlags = BindFlags.DepthStencil,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None,
});
depthStencilView = new DepthStencilView(device, depthBuffer);
//Define Rasterizer
RasterizerStateDescription rasterizerDescription = new RasterizerStateDescription()
{
CullMode = CullMode.None,
FillMode = FillMode.Solid,
IsAntialiasedLineEnabled = true,
IsFrontCounterclockwise = true,
IsMultisampleEnabled = true,
IsDepthClipEnabled = true,
IsScissorEnabled = false
};
deviceContext.Rasterizer.State = RasterizerState.FromDescription(device, rasterizerDescription);
//Set ViewPort
deviceContext.Rasterizer.SetViewports(new Viewport(
0,
0,
renderForm.Width,
renderForm.Height));
deviceContext.OutputMerger.SetTargets(depthStencilView, renderTargetView);
//Force recalibration on first load
userResized = true;
}
#endregion
#region Run
public void Run()
{
clock = new Stopwatch();
clock.Start();
this.lastFrameTime = clock.Elapsed;
Initialize();
LoadContent();
MessagePump.Run(renderForm, () =>
{
if (userResized)
{
backBufffer.Dispose();
RenderTargetView.Dispose();
depthBuffer.Dispose();
depthStencilView.Dispose();
//Resize the buffers
swapChain.ResizeBuffers(
0,
renderForm.ClientSize.Width,
renderForm.ClientSize.Height,
Format.Unknown,
SwapChainFlags.None
);
//Get the new Backbuffer
backBufffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
//Renew RenderTargetView
renderTargetView = new RenderTargetView(device, backBufffer);
//Create the new DepthBuffer
depthBuffer = new Texture2D(device, new Texture2DDescription()
{
Format = Format.D32_Float_S8X24_UInt,
ArraySize = 1,
MipLevels = 1,
Width = renderForm.ClientSize.Width,
Height = renderForm.ClientSize.Height,
SampleDescription = new SampleDescription(1, 0),
Usage = ResourceUsage.Default,
BindFlags = BindFlags.DepthStencil,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None
});
//Create DepthBufferView
depthStencilView = new DepthStencilView(device, depthBuffer);
//SetUp Targets and Viewports for Rendering
deviceContext.Rasterizer.SetViewports(new Viewport(0, 0, renderForm.Width, renderForm.Height));
deviceContext.OutputMerger.SetTargets(depthStencilView, renderTargetView);
//finished resizing
isResizing = userResized = false;
}
TimeSpan timeSinceLastFrame = clock.Elapsed - this.lastFrameTime;
this.lastFrameTime = clock.Elapsed;
Update(clock.Elapsed, timeSinceLastFrame);
BeginFrame();
Draw(clock.Elapsed, timeSinceLastFrame);
EndFrame();
});
UnloadContent();
}
#endregion
#region MethodsToOverride
public virtual void Update(TimeSpan totalRunTime, TimeSpan timeSinceLastFrame)
{
}
public virtual void Draw(TimeSpan totalRunTime, TimeSpan timeSinceLastFrame)
{
}
public virtual void BeginFrame()
{
}
public void EndFrame()
{
swapChain.Present(0, PresentFlags.None); //Presents the image to the user
}
public virtual void Initialize()
{
}
public virtual void LoadContent()
{
}
public virtual void UnloadContent()
{
}
public virtual void Dispose()
{
renderForm.Dispose();
backBufffer.Dispose();
deviceContext.ClearState();
deviceContext.Flush();
device.Dispose();
deviceContext.Dispose();
depthBuffer.Dispose();
depthStencilView.Dispose();
swapChain.Dispose();
}
#endregion
#region Handlers
private void HandleResize(object sender, EventArgs e)
{
backBufffer.Dispose();
RenderTargetView.Dispose();
depthBuffer.Dispose();
depthStencilView.Dispose();
//Resize the buffers
swapChain.ResizeBuffers(
0,
renderForm.ClientSize.Width,
renderForm.ClientSize.Height,
Format.Unknown,
SwapChainFlags.None
);
//Get the new Backbuffer
backBufffer = Texture2D.FromSwapChain<Texture2D>(swapChain, 0);
//Renew RenderTargetView
renderTargetView = new RenderTargetView(device, backBufffer);
//Create the new DepthBuffer
depthBuffer = new Texture2D(device, new Texture2DDescription()
{
Format = Format.D32_Float_S8X24_UInt,
ArraySize = 1,
MipLevels = 1,
Width = renderForm.ClientSize.Width,
Height = renderForm.ClientSize.Height,
SampleDescription = new SampleDescription(1, 0),
Usage = ResourceUsage.Default,
BindFlags = BindFlags.DepthStencil,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None
});
//Create DepthBufferView
depthStencilView = new DepthStencilView(device, depthBuffer);
//SetUp Targets and Viewports for Rendering
deviceContext.Rasterizer.SetViewports(new Viewport(0, 0, renderForm.Width, renderForm.Height));
deviceContext.OutputMerger.SetTargets(depthStencilView, renderTargetView);
//finished resizing
isResizing = userResized = false;
TimeSpan timeSinceLastFrame = clock.Elapsed - this.lastFrameTime;
this.lastFrameTime = clock.Elapsed;
Update(clock.Elapsed, timeSinceLastFrame);
BeginFrame();
Draw(clock.Elapsed, timeSinceLastFrame);
EndFrame();
}
private void HandleClientSizeChanged(object sender, EventArgs e)
{
userResized = true;
}
#endregion
#region GetAndSet
public Device Device
{
get
{
return this.device;
}
}
public DeviceContext DeviceContext
{
get
{
return this.deviceContext;
}
}
public RenderTargetView RenderTargetView
{
get
{
return this.renderTargetView;
}
}
public RenderForm RenderForm
{
get
{
return this.renderForm;
}
}
public DepthStencilView DepthStencilView
{
get
{
return this.depthStencilView;
}
}
#endregion
}
}
SimpleIntegration.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using SlimDX;
using SlimDX.D3DCompiler;
using SlimDX.Direct3D11;
using Buffer = SlimDX.Direct3D11.Buffer;
using System.Diagnostics;
namespace SlimDX_Evaluation
{
class SampleIntegration : BasicFramework
{
#region Members
private VertexShader vertexShader;
private PixelShader pixelShader;
private Buffer constantBuffer;
private VertexBufferBinding vertexBufferBinding_model;
private int vertCount;
private Stopwatch timer;
private long lastFrame;
private int frameCount;
private Matrix view;
private Matrix proj;
private Matrix viewProj;
Matrix worldViewProj;
#endregion
public override void Draw(TimeSpan totalRunTime, TimeSpan timeSinceLastFrame)
{
//Output FPS
frameCount++;
if (timer.ElapsedMilliseconds - lastFrame >= 1000)
{
Console.WriteLine("FPS: " + frameCount);
lastFrame = timer.ElapsedMilliseconds;
frameCount = 0;
}
worldViewProj = Matrix.Multiply(Matrix.RotationAxis(Vector3.UnitY, timer.ElapsedMilliseconds / 1000.0f), viewProj);
//Update ConstantBuffer
var buffer = new MyConstantBuffer();
buffer.worldViewProj = worldViewProj;
var data = new DataStream(System.Runtime.InteropServices.Marshal.SizeOf(new MyConstantBuffer()), true, true);
data.Write(buffer);
data.Position = 0;
DeviceContext.UpdateSubresource(new DataBox(0, 0, data),constantBuffer,0);
//Clear
Device.ImmediateContext.ClearRenderTargetView(RenderTargetView, Color.WhiteSmoke);
Device.ImmediateContext.ClearDepthStencilView(DepthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0);
//Draw
DeviceContext.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
DeviceContext.InputAssembler.SetVertexBuffers(0, vertexBufferBinding_model);
Device.ImmediateContext.Draw(vertCount, 0);
base.Draw(totalRunTime, timeSinceLastFrame);
}
public override void LoadContent()
{
//Initialize the timer
timer = new Stopwatch();
timer.Start();
//Initialize Matrices
view = Matrix.LookAtLH(new Vector3(0, 100, -500), new Vector3(0, 0, 0), Vector3.UnitY);
proj = Matrix.PerspectiveFovLH((float)Math.PI / 4.0f, (float)RenderForm.ClientSize.Width / RenderForm.ClientSize.Height, 0.1f, 10000.0f); // float cast avoids integer division in the aspect ratio
viewProj = Matrix.Multiply(view, proj);
//Load Shaders
ShaderBytecode vertexShaderByteCode;
ShaderBytecode pixelShaderByteCode;
try
{
vertexShaderByteCode = ShaderBytecode.CompileFromFile("Shaders/shader.hlsl", "VShader", "vs_4_0",ShaderFlags.None,EffectFlags.None);
pixelShaderByteCode = ShaderBytecode.CompileFromFile("Shaders/shader.hlsl", "PShader", "ps_4_0",ShaderFlags.None,EffectFlags.None);
}
catch (System.Exception)
{
throw; // rethrow without resetting the stack trace
}
vertexShader = new VertexShader(Device, vertexShaderByteCode);
pixelShader = new PixelShader(Device, pixelShaderByteCode);
DeviceContext.VertexShader.Set(vertexShader);
DeviceContext.PixelShader.Set(pixelShader);
var signature = ShaderSignature.GetInputSignature(vertexShaderByteCode);
//Define the first 16 bytes as Position, the next 16 as Color, the next 12 as Normal (4 coords, 4 color channels, 3 normal components)
InputElement[] elements = new InputElement[]
{
new InputElement("POSITION", 0, SlimDX.DXGI.Format.R32G32B32A32_Float, 0, 0),
new InputElement("COLOR" , 0, SlimDX.DXGI.Format.R32G32B32A32_Float, 16, 0),
new InputElement("NORMAL" , 0, SlimDX.DXGI.Format.R32G32B32_Float, 32, 0),
};
//Define Layout for the InputAssembler
DeviceContext.InputAssembler.InputLayout = new InputLayout(Device, signature, elements);
//Generate and link constant buffers
constantBuffer = new Buffer(Device, System.Runtime.InteropServices.Marshal.SizeOf(new Matrix()), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
DeviceContext.VertexShader.SetConstantBuffer(constantBuffer,0);
//load STL and generate Vertices from it
ModuleWorks.Meshf meshf = ModuleWorks.MeshHelper.ReadSTLf(@"C:\ModuleWorks\STL\Homer.stl", ModuleWorks.Unit.Metric);
try
{
vertCount = meshf.TriangleCount * 3;
var vertices_model = new DataStream(vertCount * System.Runtime.InteropServices.Marshal.SizeOf(typeof(Vertex)), true, true);
var stopWatch = new Stopwatch();
stopWatch.Start();
for (int x = 0; x < meshf.TriangleCount; x++)
{
var triangle = meshf.GetTriangle(x);
var normal = triangle.Normal;
vertices_model.Write(new Vertex(meshf.GetPoint(triangle.Idx1).X, meshf.GetPoint(triangle.Idx1).Y, meshf.GetPoint(triangle.Idx1).Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z));
vertices_model.Write(new Vertex(meshf.GetPoint(triangle.Idx2).X, meshf.GetPoint(triangle.Idx2).Y, meshf.GetPoint(triangle.Idx2).Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z));
vertices_model.Write(new Vertex(meshf.GetPoint(triangle.Idx3).X, meshf.GetPoint(triangle.Idx3).Y, meshf.GetPoint(triangle.Idx3).Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z));
}
vertices_model.Position = 0;
//Generate VertexBufferBinding
var sizeInBytes = vertCount * System.Runtime.InteropServices.Marshal.SizeOf(typeof(Vertex));
var stride = System.Runtime.InteropServices.Marshal.SizeOf(typeof(Vector4)) * 2 + System.Runtime.InteropServices.Marshal.SizeOf(typeof(Vector3));
var vertexBuffer_model = new Buffer(Device, vertices_model, sizeInBytes, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
vertexBufferBinding_model = new VertexBufferBinding(vertexBuffer_model, stride, 0);
vertices_model.Close();
}
catch (System.Exception ex)
{
Console.WriteLine(ex);
return;
}
}
public override void Dispose()
{
vertexShader.Dispose();
pixelShader.Dispose();
constantBuffer.Dispose();
base.Dispose();
}
}
}
shader.hlsl
cbuffer matrixBuffer : register(b0)
{
float4x4 worldViewProj;
};
struct VOut
{
float4 position : SV_POSITION;
float4 color : COLOR;
float3 normal : NORMAL;
};
VOut VShader(float4 position : POSITION, float4 color : COLOR, float3 normal : NORMAL)
{
VOut output = (VOut)0;
output.position = mul(worldViewProj, position);
output.normal = normalize(mul((float3x3)worldViewProj,normal));
output.color = color;
return output;
}
float4 PShader(VOut vout) : SV_TARGET
{
return vout.color;
}
Thanks in advance
I solved it.
The issue was an inefficient approach to adding the vertices.
For further information, check out this nice paper I've found
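A sketch of what that kind of change can look like with the question's SlimDX types (an illustration under that assumption, not the actual fix): fill a managed Vertex array first and hand it to the DataStream in one WriteRange call, instead of three interop Write() calls and nine GetPoint() calls per triangle.
var verts = new Vertex[vertCount];
int v = 0;
for (int x = 0; x < meshf.TriangleCount; x++)
{
    var triangle = meshf.GetTriangle(x);
    var normal = triangle.Normal;
    var p1 = meshf.GetPoint(triangle.Idx1); // fetch each point once
    var p2 = meshf.GetPoint(triangle.Idx2);
    var p3 = meshf.GetPoint(triangle.Idx3);
    verts[v++] = new Vertex(p1.X, p1.Y, p1.Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z);
    verts[v++] = new Vertex(p2.X, p2.Y, p2.Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z);
    verts[v++] = new Vertex(p3.X, p3.Y, p3.Z, 1.0f, 0.0f, 0.0f, 1.0f, normal.X, normal.Y, normal.Z);
}
var vertices_model = new DataStream(vertCount * System.Runtime.InteropServices.Marshal.SizeOf(typeof(Vertex)), true, true);
vertices_model.WriteRange(verts); // one bulk copy instead of vertCount small writes
vertices_model.Position = 0;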
I would like to try out some code in Microsoft Visual C# Express Edition, and I'm getting this error:
The type or namespace name 'Properties' does not exist in the namespace 'EducationalSuite.Core' (are you missing an assembly reference?)
I right-clicked References, but I couldn't find "Properties" or "EducationalSuite.Core" anywhere.
Here is the code:
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.IO;
using System.Text;
using System.Windows.Forms;
using System.Media;
using System.Resources;
namespace EducationalSuite.Core.Plugins
{
public delegate void RectangleItemClickedDelegate(Rectangle rect, int index);
public partial class GeoSafariItem : Control
{
protected List<Rectangle> lastFlashingItems = new List<Rectangle>();
protected int lastHeight = 0;
private Image imageFile = null;
protected List<Rectangle> hotspots = new List<Rectangle>();
protected Dictionary<int, string> textItems = new Dictionary<int, string>();
protected Dictionary<int, FileInfo> audioItems = new Dictionary<int, FileInfo>();
protected Rectangle lastRectangle;
protected int selectedIndex = 0;
protected int countItemsLeft = 6;
protected int countItemsRight = 6;
protected int imageOffsetTop = 0;
protected int imageOffsetBottom = 0;
protected bool paintHotSpots = false, colorSwitch = false, paintItemLabels = false;
protected Timer timer = new Timer();
public event RectangleItemClickedDelegate HotspotClick;
public event RectangleItemClickedDelegate QuestionItemClick;
public event RectangleItemClickedDelegate QuestionItemRightClick;
protected void OnHotspotClick(Rectangle rect, int index)
{
if (HotspotClick != null)
{
HotspotClick(this.RectangleToScreen(rect), index);
}
}
protected void OnQuestionItemRightClick(Rectangle rect, int index)
{
if (QuestionItemRightClick != null)
{
QuestionItemRightClick(this.RectangleToScreen(rect), index);
}
}
protected void OnQuestionItemClick(Rectangle rect, int index)
{
if (QuestionItemClick != null)
{
QuestionItemClick(this.RectangleToScreen(rect), index);
}
}
public GeoSafariItem()
{
this.imageFile = EducationalSuite.Core.Properties.Resources.singlepixel;
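// Note: the line above is the one that triggers the missing-'Properties' error;
// it needs the generated Resources class from the EducationalSuite.Core project.
// A hypothetical stand-in, assuming any placeholder image works here:
// this.imageFile = new Bitmap(1, 1);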
timer.Interval = 100;
timer.Tick += new EventHandler(timer_Tick);
timer.Enabled = true;
this.MouseUp += new MouseEventHandler(GeoSafariItem_MouseUp);
// Activates double buffering
SetStyle(ControlStyles.UserPaint, true);
SetStyle(ControlStyles.AllPaintingInWmPaint, true);
SetStyle(ControlStyles.DoubleBuffer, true);
SetStyle(ControlStyles.ResizeRedraw, true);
this.DoubleBuffered = true;
//InitializeComponent();
}
public void SetItemText(int index, string text)
{
if (string.IsNullOrEmpty(text))
{
if (this.textItems.ContainsKey(index)) textItems.Remove(index);
}
else
{
this.textItems[index] = text;
}
if (PaintItemLabels)
{
this.Invalidate();
}
}
public string GetItemText(int index)
{
if (this.textItems.ContainsKey(index))
{
return this.textItems[index];
}
else
{
return string.Empty;
}
}
public void SetItemAudio(int index, FileInfo file)
{
if ((file == null) && !file.Exists)
{
if (this.audioItems.ContainsKey(index)) audioItems.Remove(index);
}
else
{
this.audioItems[index] = file;
}
}
public FileInfo GetItemAudio(int index)
{
if (this.audioItems.ContainsKey(index))
{
return this.audioItems[index];
}
else
{
return null;
}
}
#region Recording Regions
bool isRecording = false;
int recordingIndex = 0;
Point recordTopLeft = Point.Empty;
Point recordBottomRight = Point.Empty;
List<Rectangle> recordedRectangles = new List<Rectangle>();
public void StartRecording()
{
isRecording = true;
recordingIndex = 0;
selectedIndex = 0;
recordedRectangles.Clear();
this.MouseUp += new MouseEventHandler(GeoSafariItemRecord_MouseUp);
this.Invalidate();
}
public List<Rectangle> FinishRecording()
{
isRecording = false;
this.MouseUp -= new MouseEventHandler(GeoSafariItemRecord_MouseUp);
this.Invalidate();
this.Hotspots.Clear();
foreach (Rectangle r in recordedRectangles)
{
this.Hotspots.Add(r);
}
return recordedRectangles;
}
private void GeoSafariItemRecord_MouseUp(object sender, MouseEventArgs e)
{
if (isRecording)
{
Rectangle size = SizeRect;
double ratio = (double)imageFile.Height / (double)size.Height;
if (recordTopLeft == Point.Empty)
{
recordTopLeft = new Point(
(int)(((double)e.Location.X - (double)size.Left) * ratio),
(int)(((double)e.Location.Y - (double)size.Top) * ratio)
);
}
else
{
recordBottomRight = new Point(
(int)(((double)e.Location.X - (double)size.Left) * ratio),
(int)(((double)e.Location.Y - (double)size.Top) * ratio)
);
Rectangle r = new Rectangle(recordTopLeft,
new Size(recordBottomRight.X - recordTopLeft.X, recordBottomRight.Y - recordTopLeft.Y));
this.recordedRectangles.Add(r);
recordingIndex++;
selectedIndex++;
recordTopLeft = Point.Empty;
recordBottomRight = Point.Empty;
}
}
this.Invalidate();
}
#endregion
void timer_Tick(object sender, EventArgs e)
{
colorSwitch = !colorSwitch;
if (lastRectangle.Width > 0)
{
this.Invalidate(lastRectangle);
}
else
{
this.Invalidate();
}
}
private Rectangle SizeRect
{
get
{
int rw, rh,
cw = (this.Width - 42),
ch = (this.Height - 2),
ox = 21,
oy = 1;
rw = cw;
rh = ch;
double imageRatio = (double)imageFile.Width / (double)imageFile.Height;
double controlRatio = (double)cw / (double)ch;
if (controlRatio > imageRatio)
{
rw = (int)Math.Round((double)rh * imageRatio);
ox += Math.Abs(rw - cw) / 2;
}
else if (controlRatio < imageRatio)
{
rh = (int)Math.Round((double)rw / imageRatio);
oy += Math.Abs(rh - ch) / 2;
}
return new Rectangle(ox, oy, rw, rh);
}
}
void GeoSafariItem_MouseUp(object sender, MouseEventArgs e)
{
Rectangle size = SizeRect;
for (int i = 0; i < hotspots.Count; i++)
{
Rectangle hotspot = hotspots[i];
double ratio = (double)size.Height / (double)imageFile.Height;
Rectangle adjustedRectange = new Rectangle(
size.Left + (int)(hotspot.X * ratio),
size.Top + (int)(hotspot.Y * ratio),
(int)(hotspot.Width * ratio),
(int)(hotspot.Height * ratio));
if (adjustedRectange.Contains(e.Location))
{
OnHotspotClick(hotspot, i);
return;
}
}
for (int i = 0; i < lastFlashingItems.Count; i++)
{
if (lastFlashingItems[i].Contains(e.Location))
{
if (e.Button == MouseButtons.Right)
OnQuestionItemRightClick(lastFlashingItems[i], i);
else
OnQuestionItemClick(lastFlashingItems[i], i);
return;
}
}
}
public List<Rectangle> Hotspots
{
get { return hotspots; }
}
public Image ImageFile
{
get { return imageFile; }
set
{
imageFile = value;
lastFlashingItems.Clear();
this.Invalidate();
}
}
public int SelectedIndex
{
get { return selectedIndex; }
set { selectedIndex = value; this.Invalidate(); }
}
public int CountItemsLeft
{
get { return countItemsLeft; }
set
{
countItemsLeft = value;
lastFlashingItems.Clear();
this.Invalidate();
}
}
public int CountItemsRight
{
get { return countItemsRight; }
set
{
countItemsRight = value;
lastFlashingItems.Clear();
this.Invalidate();
}
}
public int ImageOffsetTop
{
get { return imageOffsetTop; }
set
{
imageOffsetTop = value;
lastFlashingItems.Clear();
this.Invalidate();
}
}
public int ImageOffsetBottom
{
get { return imageOffsetBottom; }
set
{
imageOffsetBottom = value;
lastFlashingItems.Clear();
this.Invalidate();
}
}
public bool PaintHotSpots
{
get { return paintHotSpots; }
set { paintHotSpots = value; this.Invalidate(); }
}
public bool PaintItemLabels
{
get { return paintItemLabels; }
set { paintItemLabels = value; this.Invalidate(); }
}
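// OnPaint draws the letterboxed image plus a column of notch
// rectangles on each side. The notch rectangles are cached in
// lastFlashingItems so timer-driven flash repaints can reuse them
// instead of recomputing the whole layout.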
protected override void OnPaint(PaintEventArgs pe)
{
Graphics g = pe.Graphics;
string itemText;
SizeF sizeItemText;
double topOffset = imageOffsetTop;
double bottomOffset = imageOffsetBottom;
double topOffsetPct = (double)topOffset / (double)imageFile.Height;
double bottomOffsetPct = (double)bottomOffset / (double)imageFile.Height;
Rectangle size = SizeRect;
// Dispose the GDI brush so repeated repaints don't leak handles.
using (SolidBrush brush = new SolidBrush(this.BackColor))
{
g.FillRectangle(brush, 0, 0, this.Width - 1, this.Height - 1);
}
g.FillRectangle(Brushes.Ivory, size.X - 25, size.Y, size.Width + 50, size.Height);
g.DrawRectangle(Pens.DarkKhaki, size.X - 25, size.Y - 1, size.Width + 50, size.Height + 1);
g.DrawImage(imageFile, size.X, size.Y, size.Width, size.Height);
Rectangle rect, rectItemText;
Brush selectedColor = (colorSwitch ? Brushes.Crimson : Brushes.Red);
topOffset = topOffsetPct * size.Height;
bottomOffset = bottomOffsetPct * size.Height;
// Math.Max guards against a zero item count (DivideByZeroException).
int tmpHeight = (size.Height - (int)topOffset - (int)bottomOffset) / Math.Max(1, countItemsLeft);
if (size.Height != this.lastHeight || this.lastFlashingItems.Count == 0)
{
lastHeight = size.Height;
lastFlashingItems.Clear();
int actualIndex = 0;
for (int i = 0; i < countItemsLeft; i++)
{
int yy = size.Y + (tmpHeight * i) + (int)topOffset;
int xx = size.X - 18;
rect = new Rectangle(xx, yy, 16, 8);
this.lastFlashingItems.Add(rect);
g.FillRectangle((actualIndex == selectedIndex ? selectedColor : Brushes.Khaki), rect);
g.DrawRectangle(Pens.DarkKhaki, rect);
if (actualIndex == selectedIndex)
{
lastRectangle = rect;
}
itemText = this.GetItemText(actualIndex);
if (PaintItemLabels && !string.IsNullOrEmpty(itemText))
{
// Draw Text next to each notch
sizeItemText = g.MeasureString(itemText, this.Font);
int xxx = size.X + 10;
rectItemText = new Rectangle(xxx, yy, Convert.ToInt32(sizeItemText.Width), Convert.ToInt32(sizeItemText.Height));
PaintHotspot(g, Color.White, rectItemText, 200);
g.DrawString(itemText, this.Font, Brushes.Black, (float)xxx, (float)yy);
}
actualIndex++;
}
tmpHeight = (size.Height - (int)topOffset - (int)bottomOffset) / Math.Max(1, countItemsRight);
for (int i = 0; i < countItemsRight; i++)
{
int yy = size.Y + (tmpHeight * i) + (int)topOffset;
int xx = size.X + size.Width + 2;
rect = new Rectangle(xx, yy, 16, 8);
this.lastFlashingItems.Add(rect);
g.FillRectangle((actualIndex == selectedIndex ? selectedColor : Brushes.Khaki), rect);
g.DrawRectangle(Pens.DarkKhaki, rect);
if (actualIndex == selectedIndex)
{
lastRectangle = rect;
}
itemText = this.GetItemText(actualIndex);
if (PaintItemLabels && !string.IsNullOrEmpty(itemText))
{
// Draw Text next to each notch
sizeItemText = g.MeasureString(itemText, this.Font);
int xxx = size.X + size.Width - 10 - Convert.ToInt32(sizeItemText.Width);
rectItemText = new Rectangle(xxx, yy, Convert.ToInt32(sizeItemText.Width), Convert.ToInt32(sizeItemText.Height));
PaintHotspot(g, Color.White, rectItemText, 200);
g.DrawString(itemText, this.Font, Brushes.Black, (float)xxx, (float)yy);
}
actualIndex++;
}
}
else
{
lastHeight = size.Height;
for (int i = 0; i < lastFlashingItems.Count; i++)
{
g.FillRectangle((i == selectedIndex ? selectedColor : Brushes.Khaki), lastFlashingItems[i]);
g.DrawRectangle(Pens.DarkKhaki, lastFlashingItems[i]);
if (i == selectedIndex)
{
lastRectangle = lastFlashingItems[i];
}
}
if (PaintItemLabels)
{
int actualIndex = 0;
for (int i = 0; i < countItemsLeft; i++)
{
itemText = this.GetItemText(actualIndex);
if (!string.IsNullOrEmpty(itemText))
{
int yy = size.Y + (tmpHeight * i) + (int)topOffset;
// Draw Text next to each notch
sizeItemText = g.MeasureString(itemText, this.Font);
int xxx = size.X + 10;
rectItemText = new Rectangle(xxx, yy, Convert.ToInt32(sizeItemText.Width), Convert.ToInt32(sizeItemText.Height));
PaintHotspot(g, Color.White, rectItemText, 200);
g.DrawString(itemText, this.Font, Brushes.Black, (float)xxx, (float)yy);
}
actualIndex++;
}
tmpHeight = (size.Height - (int)topOffset - (int)bottomOffset) / Math.Max(1, countItemsRight);
for (int i = 0; i < countItemsRight; i++)
{
itemText = this.GetItemText(actualIndex);
if (!string.IsNullOrEmpty(itemText))
{
int yy = size.Y + (tmpHeight * i) + (int)topOffset;
// Draw Text next to each notch
sizeItemText = g.MeasureString(itemText, this.Font);
int xxx = size.X + size.Width - 10 - Convert.ToInt32(sizeItemText.Width);
rectItemText = new Rectangle(xxx, yy, Convert.ToInt32(sizeItemText.Width), Convert.ToInt32(sizeItemText.Height));
PaintHotspot(g, Color.White, rectItemText, 200);
g.DrawString(itemText, this.Font, Brushes.Black, (float)xxx, (float)yy);
}
actualIndex++;
}
}
}
// Calling the base class OnPaint
base.OnPaint(pe);
if (this.isRecording)
{
for (int i = 0; i < this.recordedRectangles.Count; i++)
{
rect = recordedRectangles[i];
double ratio = (double)size.Height / (double)imageFile.Height;
Rectangle adjustedRectangle = new Rectangle(
size.Left + (int)(rect.X * ratio),
size.Top + (int)(rect.Y * ratio),
(int)(rect.Width * ratio),
(int)(rect.Height * ratio));
PaintHotspot(g, Color.LightBlue, adjustedRectangle, (i + 1).ToString());
}
}
else if (this.paintHotSpots)
{
for (int i = 0; i < hotspots.Count; i++)
{
Rectangle hotspot = hotspots[i];
double ratio = (double)size.Height / (double)imageFile.Height;
Rectangle adjustedRectangle = new Rectangle(
size.Left + (int)(hotspot.X * ratio),
size.Top + (int)(hotspot.Y * ratio),
(int)(hotspot.Width * ratio),
(int)(hotspot.Height * ratio));
PaintHotspot(g, Color.LightGreen, adjustedRectangle, (i + 1).ToString());
}
}
}
protected virtual void PaintHotspot(Graphics g, Color c, Rectangle hotspot, int alpha)
{
PaintHotspot(g, c, hotspot, alpha, null);
}
protected virtual void PaintHotspot(Graphics g, Color c, Rectangle hotspot, string txt)
{
PaintHotspot(g, c, hotspot, 100, txt);
}
protected virtual void PaintHotspot(Graphics g, Color c, Rectangle hotspot, int alpha, string txt)
{
// Dispose the temporary brush to avoid leaking GDI handles.
using (SolidBrush brush = new SolidBrush(Color.FromArgb(alpha, c)))
{
g.FillRectangle(brush, hotspot);
}
if (!string.IsNullOrEmpty(txt))
g.DrawString(txt, this.Font, Brushes.DarkGreen, hotspot.Location);
}
}
}
Update
I imagine the following line is causing the error.
this.imageFile = EducationalSuite.Core.Properties.Resources.singlepixel;
The code is referring to an image resource named "singlepixel". This image must be in the default resource file of the EducationalSuite.Core assembly. First confirm that you are actually editing that assembly by opening Project Properties and checking the Default Namespace on the Application page; it should read "EducationalSuite.Core". If it doesn't, you are most likely missing a reference to that assembly.
If you have the EducationalSuite.Core project open, the easiest way to add the singlepixel resource is to open Project Properties, go to the Resources tab, and create a new default resource file. Then open the Add Resource drop-down at the top and select Existing File or New Image, depending on whether you already have the file or need to create it. Name the resource "singlepixel".
Visual Studio will generate a Resources helper class under the Properties namespace, so in your code you can access the resource as EducationalSuite.Core.Properties.Resources.singlepixel.
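For reference, the generated Resources.Designer.cs accessor typically looks roughly like the following sketch (the real generated file also tracks a resource culture, and details vary between Visual Studio versions):
namespace EducationalSuite.Core.Properties
{
    internal class Resources
    {
        private static System.Resources.ResourceManager resourceMan;

        // Lazily creates the ResourceManager for the assembly's default resource file.
        internal static System.Resources.ResourceManager ResourceManager
        {
            get
            {
                if (resourceMan == null)
                {
                    resourceMan = new System.Resources.ResourceManager(
                        "EducationalSuite.Core.Properties.Resources",
                        typeof(Resources).Assembly);
                }
                return resourceMan;
            }
        }

        // One strongly typed property is generated per resource.
        internal static System.Drawing.Bitmap singlepixel
        {
            get
            {
                return (System.Drawing.Bitmap)ResourceManager.GetObject("singlepixel");
            }
        }
    }
}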
Old answer
In general, the Properties namespace contains application- or user-specific settings. You can add these settings (and the namespace) by navigating to the Settings tab in the project's Properties.
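For example, a user-scoped string setting named StreamUrl (a made-up name, for illustration) added on that tab would be read and persisted like this:
// "StreamUrl" is a hypothetical setting added on the Settings tab.
string url = Properties.Settings.Default.StreamUrl;            // read
Properties.Settings.Default.StreamUrl = "rtmp://example.com";  // write (user-scoped settings only)
Properties.Settings.Default.Save();                            // persist the change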
Unfortunately it's hard to say more based on this information alone. Could you post the piece of code that causes the error?
If you double-click the error message, the IDE will take you to the offending line.
Most likely the code is expecting a settings variable that has not been added to the project.
Looks like you are missing the reference. If it is not under References in Solution Explorer, then I would do a file search in Windows for "EducationalSuite.Core" to see where it is on the system, and add it. You may also be missing a "using" directive; if you hover over the "Properties" text you should get a small drop-down that will add the usings for you.
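For instance, assuming the reference to EducationalSuite.Core is in place, either of these forms resolves the resource:
// Option 1: fully qualify the resource.
this.imageFile = EducationalSuite.Core.Properties.Resources.singlepixel;

// Option 2: add a using directive at the top of the file...
using EducationalSuite.Core.Properties;
// ...and then refer to it simply as:
this.imageFile = Resources.singlepixel;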
If this does not help, could you share more information?
Hope this helps!