In my Unity game, each frame is rendered into a texture and then assembled into a video using FFmpeg. My question is whether I am doing this right, because avcodec_send_frame fails every time and my code throws an exception.
I am pretty sure I am doing something wrong, doing things in the wrong order, or simply missing something.
Here is the code for capturing the texture:
void Update()
{
    //StartCoroutine(CaptureFrame());
    if (rt == null)
    {
        rect = new Rect(0, 0, captureWidth, captureHeight);
        rt = new RenderTexture(captureWidth, captureHeight, 24);
        frame = new Texture2D(captureWidth, captureHeight, TextureFormat.RGB24, false);
    }

    Camera camera = this.GetComponent<Camera>(); // NOTE: added because there was no reference to camera in original script; must add this script to Camera
    camera.targetTexture = rt;
    camera.Render();

    RenderTexture.active = rt;
    frame.ReadPixels(rect, 0, 0);
    frame.Apply();

    camera.targetTexture = null;
    RenderTexture.active = null;

    byte[] fileData = frame.GetRawTextureData();
    encoding(fileData, fileData.Length);
}
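A side note on the capture side (my observation, not from the original post): frame is created as TextureFormat.RGB24, so GetRawTextureData returns 3 bytes per pixel, while the encoder below configures swscale with AV_PIX_FMT_RGBA, i.e. 4 bytes per pixel, and computes the stride from captureHeight instead of the width. One of those assumptions has to give before the pixel data can be correct.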
And here is the code for encoding and sending the byte data:
private unsafe void encoding(byte[] bytes, int size)
{
    Debug.Log("Encoding...");

    AVCodec* codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
    int ret, got_output = 0;

    AVCodecContext* codecContext = ffmpeg.avcodec_alloc_context3(codec);
    codecContext->bit_rate = 400000;
    codecContext->width = captureWidth;
    codecContext->height = captureHeight;

    //codecContext->time_base.den = 25;
    //codecContext->time_base.num = 1;
    AVRational timeBase = new AVRational();
    timeBase.num = 1;
    timeBase.den = 25;
    codecContext->time_base = timeBase;

    //AVStream* videoAVStream = null;
    //videoAVStream->time_base = timeBase;

    AVRational frameRate = new AVRational();
    frameRate.num = 25;
    frameRate.den = 1;
    codecContext->framerate = frameRate;

    codecContext->gop_size = 10;
    codecContext->max_b_frames = 1;
    codecContext->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;

    AVFrame* inputFrame = ffmpeg.av_frame_alloc();
    inputFrame->format = (int)codecContext->pix_fmt;
    inputFrame->width = captureWidth;
    inputFrame->height = captureHeight;
    inputFrame->linesize[0] = inputFrame->width;

    AVPixelFormat dst_pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P, src_pix_fmt = AVPixelFormat.AV_PIX_FMT_RGBA;
    int src_w = 1920, src_h = 1080, dst_w = 1920, dst_h = 1080;

    GCHandle pinned = GCHandle.Alloc(bytes, GCHandleType.Pinned);
    IntPtr address = pinned.AddrOfPinnedObject();
    sbyte** inputData = (sbyte**)address;

    SwsContext* sws_ctx = ffmpeg.sws_getContext(src_w, src_h, src_pix_fmt,
                                                dst_w, dst_h, dst_pix_fmt,
                                                0, null, null, null);

    fixed (int* lineSize = new int[1])
    {
        lineSize[0] = 4 * captureHeight;
        // Convert RGBA to YUV420P
        ffmpeg.sws_scale(sws_ctx, inputData, lineSize, 0, codecContext->width, inputFrame->extended_data, inputFrame->linesize);
    }

    inputFrame->pts = counter++;

    if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
        throw new ApplicationException("Error sending a frame for encoding!");

    AVPacket pkt = new AVPacket();
    //pkt.data = inData;
    AVPacket* packet = &pkt;
    ffmpeg.av_init_packet(packet);
    Debug.Log("pkt.size " + pkt.size);

    pinned.Free();

    AVDictionary* options = null;
    ffmpeg.av_dict_set(&options, "pkt_size", "1300", 0);
    ffmpeg.av_dict_set(&options, "buffer_size", "65535", 0);

    AVIOContext* server = null;
    ffmpeg.avio_open2(&server, "udp://192.168.0.1:1111", ffmpeg.AVIO_FLAG_WRITE, null, &options);
    Debug.Log("encoded");

    ret = ffmpeg.avcodec_encode_video2(codecContext, &pkt, inputFrame, &got_output);
    ffmpeg.avio_write(server, pkt.data, pkt.size);
    ffmpeg.av_free_packet(&pkt);
    pkt.data = null;
    pkt.size = 0;
}
And every time I start the game, the check

if (ffmpeg.avcodec_send_frame(codecContext, inputFrame) < 0)
    throw new ApplicationException("Error sending a frame for encoding!");

throws the exception.
Any help in fixing the issue would be greatly appreciated :)
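One thing that stands out in the snippet above (a hedged observation, not a verified fix): the codec context is never opened with avcodec_open2, and the frame's data planes are never allocated with av_frame_get_buffer, so avcodec_send_frame has nothing valid to consume and returns a negative error code. A minimal sketch of the call order the FFmpeg API expects, using the same FFmpeg.AutoGen names as above:

AVCodec* codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
AVCodecContext* codecContext = ffmpeg.avcodec_alloc_context3(codec);
// ... set bit_rate, width, height, time_base, gop_size, pix_fmt as above ...
if (ffmpeg.avcodec_open2(codecContext, codec, null) < 0)   // required before avcodec_send_frame
    throw new ApplicationException("Could not open codec!");

AVFrame* inputFrame = ffmpeg.av_frame_alloc();
inputFrame->format = (int)codecContext->pix_fmt;
inputFrame->width = codecContext->width;
inputFrame->height = codecContext->height;
if (ffmpeg.av_frame_get_buffer(inputFrame, 32) < 0)        // allocates inputFrame->data / linesize planes
    throw new ApplicationException("Could not allocate frame buffer!");

// Only then: sws_scale(...) into inputFrame->data, set pts, and call avcodec_send_frame(...).

This sketch only shows the initialization order; in a real encoder the context and frame should also be created once and reused, not rebuilt on every Update.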
So the situation is like this: I use SharpDX to create a swap chain that is linked to the XAML element SwapChainPanel. I render over it and then try to grab the rendered data into a byte[] to place into an image buffer. For that, I use this ReadBuffer function:
private D3D11.Texture2D OffscreenstagingTexture;

private D3D11.Texture2D GetTexture(D3D11.Device _Device,
                                   D3D11.DeviceContext _Context,
                                   D3D11.Texture2DDescription _Desc)
{
    if (OffscreenstagingTexture == null ||
        OffscreenstagingTexture.Description.Width != _Desc.Width ||
        OffscreenstagingTexture.Description.Height != _Desc.Height)
    {
        RemoveAndDispose(ref OffscreenstagingTexture);
        D3D11.Texture2DDescription _2DDesc = new D3D11.Texture2DDescription
        {
            Format = DXGI.Format.B8G8R8A8_UNorm,
            Height = _Desc.Height,
            Width = _Desc.Width,
            SampleDescription = new DXGI.SampleDescription(1, 0),
            ArraySize = _Desc.ArraySize,
            BindFlags = D3D11.BindFlags.None,
            CpuAccessFlags = D3D11.CpuAccessFlags.Read | D3D11.CpuAccessFlags.Write,
            Usage = D3D11.ResourceUsage.Staging,
            MipLevels = _Desc.MipLevels
        };
        OffscreenstagingTexture = ToDispose(new D3D11.Texture2D(_Device, _2DDesc));
    }
    return OffscreenstagingTexture;
}
public byte[] ReadBuffer(D3D11.Texture2D _2DTexture)
{
    byte[] data = null;
    D3D11.Texture2DDescription _Desc = _2DTexture.Description;
    D3D11.Device _Device = _2DTexture.Device;
    D3D11.DeviceContext _Context = _Device.ImmediateContext;
    D3D11.Texture2D _2DMappedTexture = GetTexture(_Device, _Context, _Desc);
    try
    {
        _Context.CopyResource(_2DTexture, _2DMappedTexture);
        int size = _Desc.Width * _Desc.Height * 4;
        DataBox box = _Context.MapSubresource(_2DMappedTexture, 0, 0, D3D11.MapMode.Read, D3D11.MapFlags.None, out DataStream stream);
        data = ToByteArray(stream);
        stream.Dispose();
    }
    catch (Exception)
    {
    }
    return data;
}

private byte[] ToByteArray(Stream inputStream)
{
    using (MemoryStream ms = new MemoryStream())
    {
        inputStream.CopyTo(ms);
        return ms.ToArray();
    }
}
Now, on most PCs it works fine, but on some PCs the SwapChainPanel returns a bigger byte[] than expected. If I have a 100 x 100 SwapChainPanel, the expected byte array size is 100 * 100 * 4, but it returns a bigger array than that, and because of that my app crashes on those machines. I reproduced the same crash on my Dell Vostro 3558.
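(The extra bytes are most likely row padding: when a staging texture is mapped, the driver may align each row of pixels, so a row occupies box.RowPitch bytes, which can be larger than Width * 4, and the mapped buffer is RowPitch * Height bytes rather than Width * Height * 4. ToByteArray above copies that whole padded buffer verbatim.)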
So I found the fix: I had to update the ReadBuffer function:
public byte[] ReadBuffer(D3D11.Texture2D _2DTexture)
{
    D3D11.Texture2DDescription _Desc = _2DTexture.Description;
    D3D11.Device _Device = _2DTexture.Device;
    D3D11.DeviceContext _Context = _Device.ImmediateContext;
    D3D11.Texture2D _2DMappedTexture = GetTexture(_Device, _Context, _Desc);
    _Context.CopyResource(_2DTexture, _2DMappedTexture);
    DataBox box = _Context.MapSubresource(_2DMappedTexture, 0, 0, D3D11.MapMode.Read, D3D11.MapFlags.None, out DataStream stream);
    var dataRectangle = new DataRectangle
    {
        DataPointer = stream.DataPointer,
        Pitch = box.RowPitch
    };
    var stride = _Desc.Width * 4;
    int size = _Desc.Width * _Desc.Height * 4;
    using (var bitmap = new Bitmap(manager.WICFactory,
                                   _2DMappedTexture.Description.Width,
                                   _2DMappedTexture.Description.Height,
                                   PixelFormat.Format32bppBGRA,
                                   dataRectangle))
    {
        var data = new byte[size];
        bitmap.CopyPixels(data, stride);
        _Context.UnmapSubresource(_2DMappedTexture, 0);
        return data;
    }
}
And it works like a charm.
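For what it's worth, the WIC bitmap is not the only way to strip the padding. A minimal sketch (an alternative I'm adding for illustration, reusing the box, stream, _Desc, _Context, and _2DMappedTexture names from ReadBuffer above and System.Runtime.InteropServices.Marshal) copies each padded row into a tightly packed array:

// Copy row by row, skipping the per-row padding reported in box.RowPitch.
var data = new byte[_Desc.Width * _Desc.Height * 4];
int tightStride = _Desc.Width * 4;
IntPtr src = stream.DataPointer;
for (int y = 0; y < _Desc.Height; y++)
{
    Marshal.Copy(src, data, y * tightStride, tightStride); // one tight row into the managed array
    src = IntPtr.Add(src, box.RowPitch);                   // advance by the padded pitch
}
_Context.UnmapSubresource(_2DMappedTexture, 0);
return data;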
Below are two methods: one uses a MemoryStream and the other uses a real file on disk.
public void InsertImage(long x, long y, long? width, long? height, string sImagePath, WorksheetPart wsp)
{
    try
    {
        DrawingsPart dp;
        ImagePart imgp;
        WorksheetDrawing wsd;
        ImagePartType ipt;
        switch (sImagePath.Substring(sImagePath.LastIndexOf('.') + 1).ToLower())
        {
            case "png":
                ipt = ImagePartType.Png;
                break;
            case "jpg":
            case "jpeg":
                ipt = ImagePartType.Jpeg;
                break;
            case "gif":
                ipt = ImagePartType.Gif;
                break;
            default:
                return;
        }
        if (wsp.DrawingsPart == null)
        {
            //----- no drawing part exists, add a new one
            dp = wsp.AddNewPart<DrawingsPart>();
            imgp = dp.AddImagePart(ipt, wsp.GetIdOfPart(dp));
            wsd = new WorksheetDrawing();
        }
        else
        {
            //----- use existing drawing part
            dp = wsp.DrawingsPart;
            imgp = dp.AddImagePart(ipt);
            dp.CreateRelationshipToPart(imgp);
            wsd = dp.WorksheetDrawing;
        }
        using (FileStream fs = new FileStream(sImagePath, FileMode.Open))
        {
            imgp.FeedData(fs);
        }
        int imageNumber = dp.ImageParts.Count<ImagePart>();
        if (imageNumber == 1)
        {
            Drawing drawing = new Drawing();
            drawing.Id = dp.GetIdOfPart(imgp);
            wsp.Worksheet.Append(drawing);
        }
        NonVisualDrawingProperties nvdp = new NonVisualDrawingProperties();
        nvdp.Id = new UInt32Value((uint)(1024 + imageNumber));
        nvdp.Name = "Picture " + imageNumber.ToString();
        nvdp.Description = "";
        DocumentFormat.OpenXml.Drawing.PictureLocks picLocks = new DocumentFormat.OpenXml.Drawing.PictureLocks();
        picLocks.NoChangeAspect = true;
        picLocks.NoChangeArrowheads = true;
        NonVisualPictureDrawingProperties nvpdp = new NonVisualPictureDrawingProperties();
        nvpdp.PictureLocks = picLocks;
        NonVisualPictureProperties nvpp = new NonVisualPictureProperties();
        nvpp.NonVisualDrawingProperties = nvdp;
        nvpp.NonVisualPictureDrawingProperties = nvpdp;
        DocumentFormat.OpenXml.Drawing.Stretch stretch = new DocumentFormat.OpenXml.Drawing.Stretch();
        stretch.FillRectangle = new DocumentFormat.OpenXml.Drawing.FillRectangle();
        BlipFill blipFill = new BlipFill();
        DocumentFormat.OpenXml.Drawing.Blip blip = new DocumentFormat.OpenXml.Drawing.Blip();
        blip.Embed = dp.GetIdOfPart(imgp);
        blip.CompressionState = DocumentFormat.OpenXml.Drawing.BlipCompressionValues.Print;
        blipFill.Blip = blip;
        blipFill.SourceRectangle = new DocumentFormat.OpenXml.Drawing.SourceRectangle();
        blipFill.Append(stretch);
        DocumentFormat.OpenXml.Drawing.Transform2D t2d = new DocumentFormat.OpenXml.Drawing.Transform2D();
        DocumentFormat.OpenXml.Drawing.Offset offset = new DocumentFormat.OpenXml.Drawing.Offset();
        offset.X = 0;
        offset.Y = 0;
        t2d.Offset = offset;
        Bitmap bm = new Bitmap(sImagePath);
        DocumentFormat.OpenXml.Drawing.Extents extents = new DocumentFormat.OpenXml.Drawing.Extents();
        if (width == null)
            extents.Cx = (long)bm.Width * (long)((float)914400 / bm.HorizontalResolution);
        else
            extents.Cx = width * (long)((float)914400 / bm.HorizontalResolution);
        if (height == null)
            extents.Cy = (long)bm.Height * (long)((float)914400 / bm.VerticalResolution);
        else
            extents.Cy = height * (long)((float)914400 / bm.VerticalResolution);
        bm.Dispose();
        t2d.Extents = extents;
        ShapeProperties sp = new ShapeProperties();
        sp.BlackWhiteMode = DocumentFormat.OpenXml.Drawing.BlackWhiteModeValues.Auto;
        sp.Transform2D = t2d;
        DocumentFormat.OpenXml.Drawing.PresetGeometry prstGeom = new DocumentFormat.OpenXml.Drawing.PresetGeometry();
        prstGeom.Preset = DocumentFormat.OpenXml.Drawing.ShapeTypeValues.Rectangle;
        prstGeom.AdjustValueList = new DocumentFormat.OpenXml.Drawing.AdjustValueList();
        sp.Append(prstGeom);
        sp.Append(new DocumentFormat.OpenXml.Drawing.NoFill());
        DocumentFormat.OpenXml.Drawing.Spreadsheet.Picture picture = new DocumentFormat.OpenXml.Drawing.Spreadsheet.Picture();
        picture.NonVisualPictureProperties = nvpp;
        picture.BlipFill = blipFill;
        picture.ShapeProperties = sp;
        Position pos = new Position();
        pos.X = x * 914400 / 72;
        pos.Y = y * 914400 / 72;
        Extent ext = new Extent();
        ext.Cx = extents.Cx;
        ext.Cy = extents.Cy;
        AbsoluteAnchor anchor = new AbsoluteAnchor();
        anchor.Position = pos;
        anchor.Extent = ext;
        anchor.Append(picture);
        anchor.Append(new ClientData());
        wsd.Append(anchor);
        wsd.Save(dp);
    }
    catch (Exception)
    {
        throw; // or do something more interesting if you want
    }
}
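As a quick orientation, here is a hedged usage sketch for the file-based method above (not part of the original answer; the file paths are hypothetical, and it assumes using directives for DocumentFormat.OpenXml.Packaging and System.Linq):

using (SpreadsheetDocument doc = SpreadsheetDocument.Open(@"C:\temp\test.xlsx", true))
{
    WorksheetPart wsp = doc.WorkbookPart.WorksheetParts.First();
    // x and y are effectively in points: they are multiplied by 914400 / 72 (= 12700 EMU per point) inside InsertImage.
    // Passing null for width/height means "use the image's native size".
    InsertImage(10, 10, null, null, @"C:\temp\logo.png", wsp);
}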
//----- use MemoryStream
public void InsertImage(long x, long y, long? width, long? height, MemoryStream ms, WorksheetPart wsp)
{
    try
    {
        DrawingsPart dp;
        ImagePart imgp;
        WorksheetDrawing wsd;
        ImagePartType ipt = ImagePartType.Jpeg;
        if (wsp.DrawingsPart == null)
        {
            //----- no drawing part exists, add a new one
            dp = wsp.AddNewPart<DrawingsPart>();
            imgp = dp.AddImagePart(ipt, wsp.GetIdOfPart(dp));
            wsd = new WorksheetDrawing();
        }
        else
        {
            //----- use existing drawing part
            dp = wsp.DrawingsPart;
            imgp = dp.AddImagePart(ipt);
            dp.CreateRelationshipToPart(imgp);
            wsd = dp.WorksheetDrawing;
        }
        Bitmap bitmap = new Bitmap(ms);
        imgp.FeedData(ms);
        int imageNumber = dp.ImageParts.Count<ImagePart>();
        if (imageNumber == 1)
        {
            Drawing drawing = new Drawing();
            drawing.Id = dp.GetIdOfPart(imgp);
            wsp.Worksheet.Append(drawing);
        }
        NonVisualDrawingProperties nvdp = new NonVisualDrawingProperties();
        nvdp.Id = new UInt32Value((uint)(1024 + imageNumber));
        nvdp.Name = "Picture " + imageNumber.ToString();
        nvdp.Description = "";
        DocumentFormat.OpenXml.Drawing.PictureLocks picLocks = new DocumentFormat.OpenXml.Drawing.PictureLocks();
        picLocks.NoChangeAspect = true;
        picLocks.NoChangeArrowheads = true;
        NonVisualPictureDrawingProperties nvpdp = new NonVisualPictureDrawingProperties();
        nvpdp.PictureLocks = picLocks;
        NonVisualPictureProperties nvpp = new NonVisualPictureProperties();
        nvpp.NonVisualDrawingProperties = nvdp;
        nvpp.NonVisualPictureDrawingProperties = nvpdp;
        DocumentFormat.OpenXml.Drawing.Stretch stretch = new DocumentFormat.OpenXml.Drawing.Stretch();
        stretch.FillRectangle = new DocumentFormat.OpenXml.Drawing.FillRectangle();
        BlipFill blipFill = new BlipFill();
        DocumentFormat.OpenXml.Drawing.Blip blip = new DocumentFormat.OpenXml.Drawing.Blip();
        blip.Embed = dp.GetIdOfPart(imgp);
        blip.CompressionState = DocumentFormat.OpenXml.Drawing.BlipCompressionValues.Print;
        blipFill.Blip = blip;
        blipFill.SourceRectangle = new DocumentFormat.OpenXml.Drawing.SourceRectangle();
        blipFill.Append(stretch);
        DocumentFormat.OpenXml.Drawing.Transform2D t2d = new DocumentFormat.OpenXml.Drawing.Transform2D();
        DocumentFormat.OpenXml.Drawing.Offset offset = new DocumentFormat.OpenXml.Drawing.Offset();
        offset.X = 0;
        offset.Y = 0;
        t2d.Offset = offset;
        DocumentFormat.OpenXml.Drawing.Extents extents = new DocumentFormat.OpenXml.Drawing.Extents();
        //Bitmap bitmap = new Bitmap(ms);
        if (width == null)
            extents.Cx = (long)bitmap.Width * (long)((float)914400 / bitmap.HorizontalResolution);
        else
            extents.Cx = width * (long)((float)914400 / bitmap.HorizontalResolution);
        if (height == null)
            extents.Cy = (long)bitmap.Height * (long)((float)914400 / bitmap.VerticalResolution);
        else
            extents.Cy = height * (long)((float)914400 / bitmap.VerticalResolution);
        bitmap.Dispose();
        t2d.Extents = extents;
        ShapeProperties sp = new ShapeProperties();
        sp.BlackWhiteMode = DocumentFormat.OpenXml.Drawing.BlackWhiteModeValues.Auto;
        sp.Transform2D = t2d;
        DocumentFormat.OpenXml.Drawing.PresetGeometry prstGeom = new DocumentFormat.OpenXml.Drawing.PresetGeometry();
        prstGeom.Preset = DocumentFormat.OpenXml.Drawing.ShapeTypeValues.Rectangle;
        prstGeom.AdjustValueList = new DocumentFormat.OpenXml.Drawing.AdjustValueList();
        sp.Append(prstGeom);
        sp.Append(new DocumentFormat.OpenXml.Drawing.NoFill());
        DocumentFormat.OpenXml.Drawing.Spreadsheet.Picture picture = new DocumentFormat.OpenXml.Drawing.Spreadsheet.Picture();
        picture.NonVisualPictureProperties = nvpp;
        picture.BlipFill = blipFill;
        picture.ShapeProperties = sp;
        Position pos = new Position();
        pos.X = x * 914400 / 72;
        pos.Y = y * 914400 / 72;
        Extent ext = new Extent();
        ext.Cx = extents.Cx;
        ext.Cy = extents.Cy;
        AbsoluteAnchor anchor = new AbsoluteAnchor();
        anchor.Position = pos;
        anchor.Extent = ext;
        anchor.Append(picture);
        anchor.Append(new ClientData());
        wsd.Append(anchor);
        wsd.Save(dp);
    }
    catch (Exception)
    {
        throw; // or do something more interesting if you want
    }
}
If I invoke the first method (using a real file on disk), everything is fine and I can insert my picture into the Excel file. But if I read the file into a MemoryStream and invoke the second method, I see the picture rectangle with an error message instead of the image.
So my question is: how can I insert a picture into Excel via a MemoryStream? I don't want to create too many files on disk.
I believe you need to create the bitmap image data from the stream first.
There is already a solution for that here on Stack Overflow: Byte Array to Bitmap Image.
I copy-paste the code from that solution:
int w = 100;
int h = 200;
int ch = 3; // number of channels (i.e. assuming 24-bit RGB in this case)
byte[] imageData = new byte[w * h * ch]; // your image data here
Bitmap bitmap = new Bitmap(w, h, PixelFormat.Format24bppRgb);
BitmapData bmData = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.ReadWrite, bitmap.PixelFormat);
IntPtr pNative = bmData.Scan0;
Marshal.Copy(imageData, 0, pNative, w * h * ch);
bitmap.UnlockBits(bmData);
OK, I think I have found a solution. I changed the MemoryStream parameter to a string and convert it to a MemoryStream like below:

MemoryStream ms = new System.IO.MemoryStream(System.Convert.FromBase64String(str));

Now it works. I learned this from the Open XML SDK 2.5 Productivity Tool.
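A side note on why the original MemoryStream version probably failed: new Bitmap(ms) reads the stream to the end, so the subsequent imgp.FeedData(ms) sees a stream whose position is already at its end and feeds no image data into the part. A simpler alternative to the Base64 workaround (a sketch, assuming the same ms variable) is to rewind the stream before feeding it:

Bitmap bitmap = new Bitmap(ms);
ms.Position = 0;     // rewind; otherwise FeedData reads nothing after Bitmap consumed the stream
imgp.FeedData(ms);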
I want to save a picture using the device's native camera. Currently I cannot get the image to save to file. I have a RawImage whose texture is the native device camera image. I take the bytes from that RawImage, encode them to PNG, and then write the PNG to a file on my computer.
public WebCamTexture webCamTexture;
public RawImage myImage;

public void start()
{
    webCamTexture = new WebCamTexture();
    myImage.texture = webCamTexture;
    myImage.transform.localScale = new Vector3(1, -1, 1);
    webCamTexture.Play();

    int width = (int)GameObject.Find("myImage").GetComponent<Rect>().width;
    int height = (int)GameObject.Find("myImage").GetComponent<Rect>().height;
    Texture2D tex = new Texture2D(width, height, TextureFormat.RGB24, false);
    tex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
    tex.Apply();
    byte[] bytes = tex.EncodeToPNG();
    System.IO.File.WriteAllBytes(Application.dataPath + "/" + "imgcap.png", bytes);
    Object.Destroy(tex);
}
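The component below is one approach that works. Rather than calling ReadPixels (which reads from the currently active render target, not from the RawImage; note also that GetComponent<Rect>() as written will not compile, since Rect is not a Component — the size lives on the RectTransform), it pulls the pixels straight out of the WebCamTexture with GetPixels32 and loads them into a Texture2D for encoding: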
using System;
using System.IO;
using System.Runtime.InteropServices;
using UnityEngine;

public class WebCamScreenShot : MonoBehaviour
{
    public static int fileCounter = 0;

    WebCamTexture webCamTexture;
    public string path = "C:/temp/UnityScreenShots";
    public string fileNamePrefix = "image_";

    void Start()
    {
        var devices = WebCamTexture.devices;
        if (devices.Length < 1) throw new System.Exception("No webcams were found");

        var device = devices[0];
        webCamTexture = new WebCamTexture(device.name);
        webCamTexture.requestedFPS = 30;
        webCamTexture.requestedWidth = 320;
        webCamTexture.requestedHeight = 240;
        webCamTexture.Play();

        if (webCamTexture.width < 1 || webCamTexture.height < 1) throw new System.Exception("Invalid resolution");
    }

    public void SaveToPNG()
    {
        // pad the file counter to five digits
        string zeros =
            (fileCounter < 10 ? "0000" :
            (fileCounter < 100 ? "000" :
            (fileCounter < 1000 ? "00" :
            (fileCounter < 10000 ? "0" : ""))));
        string image_path = path + $"/{fileNamePrefix + zeros + fileCounter}.png";
        byte[] data = ScreenshotWebcam(webCamTexture);
        File.WriteAllBytes(image_path, data);
        fileCounter++;
    }

    static byte[] ScreenshotWebcam(WebCamTexture wct)
    {
        Texture2D colorTex = new Texture2D(wct.width, wct.height, TextureFormat.RGBA32, false);
        byte[] colorByteData = Color32ArrayToByteArray(wct.GetPixels32());
        colorTex.LoadRawTextureData(colorByteData);
        colorTex.Apply();
        return colorTex.EncodeToPNG();
    }

    static byte[] Color32ArrayToByteArray(Color32[] colors)
    {
        // https://stackoverflow.com/a/21575147/2496170
        if (colors == null || colors.Length == 0) return null;

        int lengthOfColor32 = Marshal.SizeOf(typeof(Color32));
        int length = lengthOfColor32 * colors.Length;
        byte[] bytes = new byte[length];

        GCHandle handle = default(GCHandle);
        try
        {
            handle = GCHandle.Alloc(colors, GCHandleType.Pinned);
            IntPtr ptr = handle.AddrOfPinnedObject();
            Marshal.Copy(ptr, bytes, 0, length);
        }
        finally
        {
            if (handle != default(GCHandle)) handle.Free();
        }
        return bytes;
    }
}
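A minimal usage sketch (my illustration, with a hypothetical trigger script): attach WebCamScreenShot to a GameObject and call SaveToPNG from another component on the same GameObject, for example:

using UnityEngine;

public class WebCamScreenShotTrigger : MonoBehaviour
{
    void Update()
    {
        // Save a frame whenever Space is pressed.
        if (Input.GetKeyDown(KeyCode.Space))
            GetComponent<WebCamScreenShot>().SaveToPNG();
    }
}

Keep in mind that a WebCamTexture usually needs a few frames after Play() before it delivers real data, so very early captures may come back black.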
I am trying to capture a desktop screenshot using SharpDX. My application is able to capture a screenshot, but the images show up without labels in Windows Explorer. I tried two solutions, but nothing changed, and I could not find any useful information in the documentation.
Here is my code:
public void SCR()
{
    uint numAdapter = 0; // # of graphics card adapter
    uint numOutput = 0;  // # of output device (i.e. monitor)

    // create device and factory
    var device = new SharpDX.Direct3D11.Device(SharpDX.Direct3D.DriverType.Hardware);
    var factory = new Factory1();

    // creating CPU-accessible texture resource
    var texdes = new SharpDX.Direct3D11.Texture2DDescription
    {
        CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.Read,
        BindFlags = SharpDX.Direct3D11.BindFlags.None,
        Format = Format.B8G8R8A8_UNorm,
        Height = factory.Adapters1[numAdapter].Outputs[numOutput].Description.DesktopBounds.Height,
        Width = factory.Adapters1[numAdapter].Outputs[numOutput].Description.DesktopBounds.Width,
        OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
        MipLevels = 1,
        ArraySize = 1
    };
    texdes.SampleDescription.Count = 1;
    texdes.SampleDescription.Quality = 0;
    texdes.Usage = SharpDX.Direct3D11.ResourceUsage.Staging;
    var screenTexture = new SharpDX.Direct3D11.Texture2D(device, texdes);

    // duplicate output stuff
    var output = new Output1(factory.Adapters1[numAdapter].Outputs[numOutput].NativePointer);
    var duplicatedOutput = output.DuplicateOutput(device);
    SharpDX.DXGI.Resource screenResource = null;
    SharpDX.DataStream dataStream;
    Surface screenSurface;

    var i = 0;
    var milliseconds = 2500000;
    while (true)
    {
        i++;

        // try to get duplicated frame within given time
        try
        {
            SharpDX.DXGI.OutputDuplicateFrameInformation duplicateFrameInformation;
            duplicatedOutput.AcquireNextFrame(milliseconds, out duplicateFrameInformation, out screenResource);
        }
        catch (SharpDX.SharpDXException e)
        {
            if (e.ResultCode.Code == SharpDX.DXGI.ResultCode.WaitTimeout.Result.Code)
            {
                // this has not been a successful capture
                // thanks @Randy
                // keep retrying
                continue;
            }
            else
            {
                throw;
            }
        }

        device.ImmediateContext.CopyResource(screenResource.QueryInterface<SharpDX.Direct3D11.Resource>(), screenTexture);
        screenSurface = screenTexture.QueryInterface<Surface>();
        // screenSurface.Map(SharpDX.DXGI.MapFlags.Read, out dataStream);

        int width = output.Description.DesktopBounds.Width;
        int height = output.Description.DesktopBounds.Height;
        var boundsRect = new System.Drawing.Rectangle(0, 0, width, height);
        var mapSource = device.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.None);

        using (var bitmap = new System.Drawing.Bitmap(width, height, PixelFormat.Format32bppArgb))
        {
            // Copy pixels from screen capture Texture to GDI bitmap
            var bitmapData = bitmap.LockBits(boundsRect, ImageLockMode.WriteOnly, bitmap.PixelFormat);
            var sourcePtr = mapSource.DataPointer;
            var destinationPtr = bitmapData.Scan0;
            for (int y = 0; y < height; y++)
            {
                // Copy a single line
                Utilities.CopyMemory(destinationPtr, sourcePtr, width * 4);

                // Advance pointers
                sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
                destinationPtr = IntPtr.Add(destinationPtr, bitmapData.Stride);
            }

            // Release source and dest locks
            bitmap.UnlockBits(bitmapData);
            device.ImmediateContext.UnmapSubresource(screenTexture, 0);
            bitmap.Save(string.Format(@"d:\scr\{0}.png", i));
        }

        // var image = FromByte(ToByte(dataStream));
        //var image = getImageFromDXStream(1920, 1200, dataStream);
        //image.Save(string.Format(@"d:\scr\{0}.png", i));
        // dataStream.Close();
        //screenSurface.Unmap();

        screenSurface.Dispose();
        screenResource.Dispose();
        duplicatedOutput.ReleaseFrame();
    }
}
After a few hours of research and googling, I found a working solution: change

PixelFormat.Format32bppArgb

to

PixelFormat.Format32bppRgb
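A plausible explanation (my assumption, not stated in the original answer): the duplicated desktop surface is B8G8R8A8, but its alpha channel is not meaningful for a desktop image and often comes through as 0. With Format32bppArgb, GDI+ treats that fourth byte as alpha and the saved PNG ends up fully transparent; with Format32bppRgb the byte is simply ignored. Only the bitmap's pixel format changes; the copy loop stays the same:

// Same copy loop as above; only the format differs, so the undefined
// alpha byte in each BGRA pixel no longer makes the PNG transparent.
using (var bitmap = new System.Drawing.Bitmap(width, height, PixelFormat.Format32bppRgb))
{
    // ... LockBits / CopyMemory per row / UnlockBits / Save as before ...
}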
I am trying to use this code to get pictures from my cam:
IGraphBuilder _graph = null;
ISampleGrabber _grabber = null;
IBaseFilter _sourceObject = null;
IBaseFilter _grabberObject = null;
IMediaControl _control = null;

// Create the main graph
_graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

// Create the webcam source
_sourceObject = FilterInfo.CreateFilter(_monikerString);

// Create the grabber
_grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
_grabberObject = _grabber as IBaseFilter;

// Add the source and grabber to the main graph
_graph.AddFilter(_sourceObject, "source");
_graph.AddFilter(_grabberObject, "grabber");

IPin pin = _sourceObject.GetPin(PinDirection.Output, 0);
IAMStreamConfig streamConfig = pin as IAMStreamConfig;

int count = 0, size = 0;
streamConfig.GetNumberOfCapabilities(out count, out size);

int width = 0, height = 0;
AMMediaType mediaType = null;
AMMediaType mediaTypeCandidate = null;
for (int index = 0; index < count; index++)
{
    VideoStreamConfigCaps scc = new VideoStreamConfigCaps();
    int test = streamConfig.GetStreamCaps(index, out mediaTypeCandidate, scc);
    if (mediaTypeCandidate.MajorType == MediaTypes.Video && mediaTypeCandidate.SubType == MediaSubTypes.YUY2)
    {
        VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaTypeCandidate.FormatPtr, typeof(VideoInfoHeader));
        if (header.BmiHeader.Width == 1280 && header.BmiHeader.Height == 720)
        {
            width = header.BmiHeader.Width;
            height = header.BmiHeader.Height;
            if (mediaType != null)
                mediaType.Dispose();
            mediaType = mediaTypeCandidate;
        }
        else
            mediaTypeCandidate.Dispose();
    }
    else
        mediaTypeCandidate.Dispose();
}
streamConfig.SetFormat(mediaType);
And it works, but I do not see the image which is generated by this code:
uint pcount = (uint)(_capGrabber.Width * _capGrabber.Height * PixelFormats.Bgr32.BitsPerPixel / 8);

// Create a file mapping
_section = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, pcount, null);
_map = MapViewOfFile(_section, 0xF001F, 0, 0, pcount);

// Get the bitmap
BitmapSource = Imaging.CreateBitmapSourceFromMemorySection(_section, _capGrabber.Width,
    _capGrabber.Height, PixelFormats.Bgr32, _capGrabber.Width * PixelFormats.Bgr32.BitsPerPixel / 8, 0) as InteropBitmap;
_capGrabber.Map = _map;

// Invoke event
if (NewBitmapReady != null)
{
    NewBitmapReady(this, null);
}
This is because the media subtype is YUY2. How can I add a converter to this code? I have read something about a color space converter that can be added to the IGraphBuilder. How does that work?
I would not expect CreateBitmapSourceFromMemorySection to accept anything other than flavors of RGB, and it is even more unlikely that it accepts a YUY2 media type. So you need the DirectShow pipeline to convert the video stream to RGB before you export it as a managed bitmap/imaging object.
To achieve this, you typically add a Sample Grabber filter initialized to the 24-bit RGB subtype and let DirectShow insert the necessary converters automatically.
See the detailed explanation and code snippets here: DirectShow: Examples for Using SampleGrabber for Grabbing a Frame and...
media.majorType = MediaType.Video;
media.subType = MediaSubType.RGB24;
media.formatPtr = IntPtr.Zero;
hr = sampGrabber.SetMediaType(media);
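For context, a hedged sketch of how that snippet is typically wrapped when using the DirectShow.NET wrappers (assuming the same sampGrabber instance as above): the media type is set before the pins are connected, so that DirectShow inserts an RGB24 color space converter upstream of the grabber automatically.

AMMediaType media = new AMMediaType();
media.majorType = MediaType.Video;
media.subType = MediaSubType.RGB24;   // force RGB24 so DirectShow adds a color space converter
media.formatPtr = IntPtr.Zero;
int hr = sampGrabber.SetMediaType(media);
DsError.ThrowExceptionForHR(hr);
DsUtils.FreeAMMediaType(media);       // release the unmanaged format block

// Only after this do you connect/render the pins, e.g. via ICaptureGraphBuilder2.RenderStream(...).
// From then on the grabber's callback receives RGB24 buffers, which CreateBitmapSourceFromMemorySection
// (with PixelFormats.Bgr24 and a matching stride) can consume directly.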