Edit: Added code (Exception on line 095, 5th time it's hit.)
/// <summary>
/// Scans every frame of each supplied image file for barcodes according to the zone
/// definitions in <paramref name="scanParameters"/> and collects the results into a
/// DataTable with columns PageNumber, Text, Type and RegionName.
/// </summary>
/// <param name="files">Paths of the image files (possibly multi-frame TIFFs) to scan.</param>
/// <param name="scanParameters">Per-zone scan settings: page range, barcode types,
/// region dimensions, enhancements and scan directions.</param>
/// <returns>A DataTable containing one row per decoded barcode.</returns>
/// <exception cref="Exception">Thrown after cleanup when any exception was caught while
/// parsing; it carries the original exception's message.</exception>
public DataTable ParseBarcodes(String[] files, BarcodeZoneScan[] scanParameters)
{
    message = null;
    gmseBitmap rotImg = null;     // enhanced copy of the current frame (when enhancements are enabled)
    gmseBitmap parseImage = null; // frame (or zone rectangle) the barcode reader works on
    gmseBitmap tempImage = null;  // rotated scratch copy handed to ReadBarcodes
    DataTable codes = new DataTable();
    codes.Columns.Add("PageNumber");
    codes.Columns.Add("Text");
    codes.Columns.Add("Type");
    codes.Columns.Add("RegionName");
    try
    {
        gmseBarcodeInfoCollection bcc;
        gmseBarcodeReaderParameter param = new gmseBarcodeReaderParameter();
        gmseLicense.License = "plaintext license key ommited";
        String dvImageName;
        int searchCount = 0; // zero-based page counter across all files and frames
        for (int dvCount = 0; dvCount < files.Length; dvCount++)
        {
            if (cancelled) // If cancelled, end the loops
            {
                break;
            }
            dvImageName = files[dvCount].ToString();
            using (gmseBitmap img = new gmseBitmap(dvImageName))
            {
                int framecount = img.GetFrameCount();
                for (int e = 0; e < framecount; e++)
                {
                    for (int j = 0; j < scanParameters.Length; j++)
                    {
                        // Only scan this zone when the current page falls inside its configured range.
                        if (scanParameters[j].Range == PageRange.All || // All
                            (scanParameters[j].Range == PageRange.Even && (searchCount == 0 || searchCount % 2 == 0)) || // even
                            (scanParameters[j].Range == PageRange.Odd && (searchCount != 0 && searchCount % 2 != 0)) ||
                            (scanParameters[j].Range == PageRange.First && searchCount == 0))
                        {
                            // Build the bit mask of barcode symbologies to search for.
                            param.BarcodeType = 0;
                            if (scanParameters[j].BarcodeTypes == BarcodeType.All) // All
                            {
                                param.BarcodeType = (int)gmseBarcodeType.All;
                            }
                            else
                            {
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Code39) != 0) // Code 39
                                    param.BarcodeType |= (int)gmseBarcodeType.Code39;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Code11) != 0) // Code 11
                                    param.BarcodeType |= (int)gmseBarcodeType.Code11;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Code93) != 0) // Code 93
                                    param.BarcodeType |= (int)gmseBarcodeType.Code93;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Code128) != 0) // Code 128
                                    param.BarcodeType |= (int)gmseBarcodeType.Code128;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Ean8) != 0) // EAN 8
                                    param.BarcodeType |= (int)gmseBarcodeType.EAN8;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.Ean13) != 0) // EAN 13
                                    param.BarcodeType |= (int)gmseBarcodeType.EAN13;
                                if ((scanParameters[j].BarcodeTypes & BarcodeType.I2of5) != 0) // I2of5
                                    param.BarcodeType |= (int)gmseBarcodeType.i2of5;
                            }
                            param.IgnoreCheckSum = 1;
                            param.ReadMode = gmseBarcodeReadMode.WholeBitmap;
                            using (rotImg = new gmseBitmap(img.ExtractFrame(e)))
                            {
                                // Do some basic image preparation for better read results.
                                rotImg.ChangePixelFormat(System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                                rotImg.SelectActiveFrame(e);
                                if (scanParameters[j].WholePage)
                                {
                                    parseImage = rotImg.ExtractFrame(e);
                                }
                                else
                                {
                                    using (tempImage = rotImg.ExtractFrame(e))
                                    {
                                        Rectangle convertedRect = returnConvertedRectangle(tempImage, scanParameters[j].Dimensions);
                                        // Only copy the zone when it overlaps the page; CopyRectangle
                                        // (Bitmap.Clone underneath) throws OutOfMemoryException for a
                                        // rectangle that lies outside the bitmap's bounds.
                                        if (convertedRect.IntersectsWith(new Rectangle(0, 0, tempImage.Width, tempImage.Height)))
                                        {
                                            parseImage = tempImage.CopyRectangle(convertedRect);
                                        }
                                    }
                                }
                            } // rotImg is disposed here (the second Dispose further down is assumed harmless -- TODO confirm for gmseBitmap)
                            if (parseImage != null)
                            {
                                // Apply the configured image enhancements; afterwards the working
                                // frame lives in rotImg and parseImage is released.
                                if (scanParameters[j].Enhancements != ImageEnhancement.None)
                                {
                                    rotImg = EnhanceImage(parseImage, scanParameters[j].Enhancements);
                                    parseImage.Dispose();
                                    parseImage = null;
                                }
                                // One pass per requested direction: copy the working frame, rotate it,
                                // read barcodes and record every hit with page number and zone name.
                                if ((scanParameters[j].BarcodeScanDirection & ScanDirection.LeftToRight) != 0 && !cancelled)
                                {
                                    if (parseImage == null)
                                    {
                                        tempImage = new gmseBitmap(rotImg.Image, 1);
                                    }
                                    else
                                    {
                                        tempImage = new gmseBitmap(parseImage.Image, 1);
                                    }
                                    bcc = tempImage.ReadBarcodes(param);
                                    foreach (gmseBarcodeInfo bc in bcc)
                                    {
                                        addBarcode(codes, new object[] { searchCount, bc.Text, gmseBarcodeTypeConvert(bc.BarcodeType), scanParameters[j].ZoneName });
                                    }
                                    tempImage.Dispose();
                                    tempImage = null;
                                }
                                if ((scanParameters[j].BarcodeScanDirection & ScanDirection.RightToLeft) != 0 && !cancelled)
                                {
                                    if (parseImage == null)
                                    {
                                        tempImage = new gmseBitmap(rotImg.Image, 1);
                                    }
                                    else
                                    {
                                        tempImage = new gmseBitmap(parseImage.Image, 1);
                                    }
                                    tempImage.RotateFlip(RotateFlipType.Rotate180FlipNone);
                                    bcc = tempImage.ReadBarcodes(param);
                                    foreach (gmseBarcodeInfo bc in bcc)
                                    {
                                        addBarcode(codes, new object[] { searchCount, bc.Text, gmseBarcodeTypeConvert(bc.BarcodeType), scanParameters[j].ZoneName });
                                    }
                                    tempImage.Dispose();
                                    tempImage = null;
                                }
                                if ((scanParameters[j].BarcodeScanDirection & ScanDirection.TopToBottom) != 0 && !cancelled)
                                {
                                    if (parseImage == null)
                                    {
                                        tempImage = new gmseBitmap(rotImg.Image, 1);
                                    }
                                    else
                                    {
                                        tempImage = new gmseBitmap(parseImage.Image, 1);
                                    }
                                    tempImage.RotateFlip(RotateFlipType.Rotate90FlipNone);
                                    bcc = tempImage.ReadBarcodes(param);
                                    foreach (gmseBarcodeInfo bc in bcc)
                                    {
                                        addBarcode(codes, new object[] { searchCount, bc.Text, gmseBarcodeTypeConvert(bc.BarcodeType), scanParameters[j].ZoneName });
                                    }
                                    tempImage.Dispose();
                                    tempImage = null;
                                }
                                if ((scanParameters[j].BarcodeScanDirection & ScanDirection.BottomToTop) != 0 && !cancelled)
                                {
                                    if (parseImage == null)
                                    {
                                        tempImage = new gmseBitmap(rotImg.Image, 1);
                                    }
                                    else
                                    {
                                        tempImage = new gmseBitmap(parseImage.Image, 1);
                                    }
                                    tempImage.RotateFlip(RotateFlipType.Rotate270FlipNone);
                                    bcc = tempImage.ReadBarcodes(param);
                                    foreach (gmseBarcodeInfo bc in bcc)
                                    {
                                        addBarcode(codes, new object[] { searchCount, bc.Text, gmseBarcodeTypeConvert(bc.BarcodeType), scanParameters[j].ZoneName });
                                    }
                                    tempImage.Dispose();
                                    tempImage = null;
                                }
                                // Release the per-zone working bitmaps before the next iteration.
                                if (parseImage != null)
                                {
                                    parseImage.Dispose();
                                    parseImage = null;
                                }
                                if (rotImg != null)
                                {
                                    rotImg.Dispose();
                                    rotImg = null;
                                }
                            }
                        }
                    }
                    searchCount++;
                    if (cancelled) // If cancelled, end both remaining loops at their next condition check
                    {
                        e = framecount;
                        dvCount = files.Length;
                    }
                }
            } // end using img
        }
    }
    catch (Exception ex)
    {
        // Remember the failure; it is rethrown below once cleanup has run.
        message = ex.Message;
    }
    finally
    {
        // img is disposed by its using statement; only the temporaries that may survive
        // an exception mid-iteration need cleaning up here. (BUGFIX: the previous finally
        // also disposed `img`, which no longer compiles now that img is declared inside
        // the using statement and is out of scope here.)
        if (rotImg != null)
        {
            rotImg.Dispose();
            rotImg = null;
        }
        if (tempImage != null)
        {
            tempImage.Dispose();
            tempImage = null;
        }
        if (parseImage != null)
        {
            parseImage.Dispose();
            parseImage = null;
        }
    }
    if (!String.IsNullOrEmpty(message))
        throw new Exception(message);
    return codes;
}
We use this GMSE Imaging plugin to assist in OCR reading barcodes from scans, it deals with skew by rotating the image by 10 degrees until it gets a read. A bug was discovered where scanning different sized sheets would throw an error.
I traced it from our main program to one of our DLLs, where I found it was catching an OutOfMemoryException.
The original TIF is 300kb, but there is a fair amount of copying done to rotate the images. (between 4 bitmaps)
However I have followed the program through and monitored the locals and it appears that each bitmap is being disposed and assigned null correctly before the method at fault loops.
I've also tried adding GC.Collect() at the end of my loop.
I am on a 32-bit W7 machine, which I have read has a 2GB limit per object, with copious amounts of RAM, so nothing is lacking in that respect.
Been watching it on Task Manager and my RAM usage only goes from 1.72GB to 1.78GB.
This has been a tricky one to research, as OoM seems to be an uncommon error to encounter.
I was wondering if anyone had any advice in dealing with this kind of exception? I'm not a Visual Studio master, is there an easy way of monitoring resources/memory usage?
Or knows of any utilities I can use to assist?
Dumping the error message here, not sure how useful the code snippets would be in this situation...
System.OutOfMemoryException was caught
Message=Out of memory.
Source=System.Drawing
StackTrace:
at System.Drawing.Bitmap.Clone(Rectangle rect, PixelFormat format)
at gmse.Imaging.gmseBitmap.CopyRectangle(Rectangle r)
at ImagingInterface.ImagingFunctions.ParseBarcodes(String[] files, BarcodeZoneScan[] scanParameters) in C:\Working\Scan.backup\Global Dlls\v2.6.0.02\ScanGlobalDlls\ImagingInterface\ImagingFunctions.cs:line 632
InnerException:
(currently reading more into GC/Memory management http://msdn.microsoft.com/en-us/library/ee851764.aspx )
Working on a step of this guide, using SOS debugger in the Immediate window, with the aim of pinpointing whether the exception is generated from managed or unmanaged code.
Steps from above have indicated it's a problem with the managed code, as exception type from SOS is shown.
Exception object: 39594518
Exception type: System.OutOfMemoryException
Message: <none>
InnerException: <none>
StackTrace (generated):
The Heapdump I took doesn't seem to be thousands of bitmaps like I had kinda expected. Not 100% sure how to interpret the dump so seeing what I can find on it.
Not sure where to move from here right now! (searches..)
edit:
I have been trying to apply the lessons in this blog to my problem.
Started with PerfMon
This graph shows my program from execution to where it catches the exception.
The first two sharp peaks occur after triggering parsing of the scanned image, the last drop off occurs when the exception is caught.
Q: Compare the curves for Virtual Bytes, Private Bytes and #Bytes in all Heaps, do they follow eachother or do they diverge?
What is the significance of #Bytes in all Heaps diverging from the others? (As it's flat on mine)
Examined Memory with !address -summary
MEM_IMAGE corresponded PrivateBytes(113MB) pretty much spot on.
Q: Where is most of the memory going (which RegionType)?
RegionUsageFree 87.15%
RegionUsageIsVAF 5.64% (Busy 43.89%) [memory allocated through VirtualAlloc]
RegionUsageImage 5.54% (Busy 43.13%) [Memory that is mapped to a file that is part of an executable image.]
In WinDbg with SOS loaded, I did a !DumpHeap
//...
7063424c 1201 28824 System.Collections.ArrayList
706228d4 903 28896 System.EventHandler
7062f640 1253 30072 System.RuntimeType
6ec2be78 833 31216 System.Windows.Forms.PropertyStore+IntegerEntry[]
6ec2b0a4 654 34008 System.Windows.Forms.CreateParams
7063547c 318 35472 System.Collections.Hashtable+bucket[]
6ec2aa5c 664 37184 System.Windows.Forms.Control+ControlNativeWindow
70632938 716 40400 System.Int32[]
6c546700 48 49728 System.Data.RBTree`1+Node[[System.Data.DataRow, System.Data]][]
70634944 85 69600 System.Byte[]
6ec2b020 931 85972 System.Windows.Forms.PropertyStore+ObjectEntry[]
6c547758 156 161616 System.Data.RBTree`1+Node[[System.Int32, mscorlib]][]
705e6c28 2107 238912 System.Object[]
00305ce8 18 293480 Free
7062f9ac 5842 301620 System.String
Total 35669 objects
And here are the top memory hogging objects.
I was hoping something would stick out like a sore thumb, like a giant amount of bitmaps or something. Is anything here scream out "I'm acting unusually!" to anyone?
(I am trying to examine the top ones individually for suspect things, but would be nice to narrow down the possible culprits a bit more)
This page (Address summary explained) has been a big help.
However C# is my first language, so I have no prior experience debugging memory issues. Would like to know if I am on the right track (Is GC an issue at all?) as I haven't found anything that's given me any clear indications yet.
Answer: Problem was caused in 3rd party library. Nothing I can do.
Found out through deliberation and some tests with stripped down code involving just the method producing the error.
Bounty awarded to what I felt I learnt the most from.
Okay, the added info helps. The problem is not that your program uses too much memory, it uses too little. The garbage collected heap has very little data in it. That's not uncommon for a program that manipulates bitmaps. The Bitmap class is a very small wrapper around GDI+ functions, it uses only a handful of bytes in the GC heap. So you can create an enormous amount of bitmaps before you fill up the gen #0 heap and trigger a garbage collection. This is also visible from Perfmon, you want to look at the .NET CLR Memory, Gen 0 Collections counter. A healthy program triggers a collection about 10 times per second when it is doing work.
Not getting collections is fine, but there's something else that doesn't work when there are no collections. The finalizer thread never runs. Finalizers are important to release unmanaged resources other than memory. Like operating system handles and any unmanaged memory pointers held by managed objects. Bitmap has those.
First thing to do is to run Taskmgr.exe, Processes tab. Click View + Select Columns and tick Handles, USER objects and GDI objects. Observe these counters while your program is running. If you see one climbing up without bound then you have a problem that could cause GDI+ to generate an OOM exception. GDI objects being the common cause.
Carefully review your code and check that you are calling Dispose() on any Image or Bitmap that you no longer use. Beware of the subtle ones, like assigning the Image property of a PictureBox. You'd have to dispose the old one if it isn't null. That's painful of course and it is too easy to miss one. So use a simple strategy, count the number of bitmaps you created and, say, on the hundredth call GC.Collect + GC.WaitForPendingFinalizers() to trigger a collection and a finalizer sweep.
In the past I've always used ANTS Memory Profiler to troubleshoot this sort of thing. It's not free, but it works pretty well for memory/reference leaks in managed code. You just take a couple of snapshots when the application should be at a steady state and look at the changes.
You can safely add a using block around the img variable and with a little refactoring you can do the same to the other image-variables you are declaring.
That should at least make the code more readable and reduce the chance of forgetting to dispose one in the finally block; it may even contribute to solving the problem. You seem to be manually disposing each and every created image object, though.
Related
I am new to WIA, and I have been asked to make a scanning service scan faster and duplex. My current service scans one page, then puts it in a PDF, and so on until there are fewer than 20 pages (this number is just a crutch used before me; I would be glad if someone explained how to get an "is there any paper in there" variable). I started to dig and found docs on MSDN describing the properties, and then found this post describing duplex scanning, but with a mysterious 5 in the set. After that I figured out that I need WIA_DPS_DOCUMENT_HANDLING_SELECT set to 0x205 (FEEDER + DUPLEX + AUTO_ADVANCE). So I tried to set them up like this:
// Writes an integer value into a WIA device property via its IProperty interface.
// set_Value requires a ref object, so the int is boxed before the call.
private static void SetProperty(Property property, int value)
{
    Object boxed = value;
    ((IProperty)property).set_Value(ref boxed);
}
...some code...
// NOTE(review): fragment that configures the WIA device before a transfer; `device`
// and SetProperty are defined in the enclosing type.
foreach (Property prop in device.Properties)
{
//LOGGER.Warn(prop.Name);
//LOGGER.Warn(prop.PropertyID);
switch ((Int32)prop.PropertyID)
{
// Document Handling Select (WIA_DPS_DOCUMENT_HANDLING_SELECT, id 3088):
// 517 = 0x205 = FEEDER + DUPLEX + AUTO_ADVANCE (per the question text above)
case 3088:
SetProperty(prop, 517);
break;
// Pages (id 3096): request one page per transfer
case 3096:
SetProperty(prop, 1);
break;
}
}
And it didn't work for me... It just got stuck on the setting... Can somebody explain how to set up the AUTO_ADVANCE and DUPLEX properties? Or maybe "make scanning faster and duplex" needs something more than just AUTO_ADVANCE and DUPLEX, and my perception of them is wrong? Or should I take the "ISIS / TWAIN (Windows XP / Vista / 7 / 8 / 8.1 / 10)" string in my scanner's description into account and use other libraries?
(Window 10, Canon DR-M160||, DR-M160 & DR-M160II Driver for Windows)
and also here is the current fetch function:
/// <summary>
/// Scans one sheet (front and back side) on the configured WIA device and returns
/// both page images as PNG-format <see cref="ImageFile"/>s.
/// </summary>
/// <returns>A list containing the front and back images, or null when the device
/// identified by _deviceId cannot be found.</returns>
public List<ImageFile> FetchImageList()
{
    List<ImageFile> imageList = new List<ImageFile>();
    int testcount = 0;
    // NOTE(review): with testcount starting at 0 and decremented at the top of the
    // body, this loop executes exactly once.
    while (testcount >= 0)
    {
        testcount--;
        WIA.Device device = FindDevice(_deviceId);
        if (device == null)
        {
            LOGGER.Warn("Scanner device not found");
            return null;
        }
        // get item
        WIA.Item scanItem = device.Items[1] as WIA.Item;
        LOGGER.Debug($"ScanItem: {scanItem.ItemID}");
        try
        {
            foreach (Property prop in device.Properties)
            {
                switch ((Int32)prop.PropertyID)
                {
                    // Document Handling Select: 517 = 0x205 (FEEDER + DUPLEX + AUTO_ADVANCE)
                    case 3088:
                        LOGGER.Warn("here");
                        SetProperty(prop, 517);
                        LOGGER.Warn("here");
                        break;
                    // Pages: one page per transfer
                    case 3096:
                        SetProperty(prop, 1);
                        break;
                }
            }
            // scan image (front side first)
            WIA.ICommonDialog wiaCommonDialog = new WIA.CommonDialog(); // NOTE(review): unused -- TODO confirm it can be removed
            WIA.ImageFile image = (WIA.ImageFile)scanItem.Transfer(WIA.FormatID.wiaFormatPNG);
            imageList.Add(image);
            LOGGER.Warn("Front");
            // get back side
            image = (WIA.ImageFile)scanItem.Transfer(WIA.FormatID.wiaFormatPNG);
            imageList.Add(image);
            LOGGER.Warn("Back");
        }
        catch (Exception)
        {
            // BUGFIX: was "throw (e)", which resets the stack trace; a bare rethrow
            // preserves the original trace for the caller.
            throw;
        }
    }
    return imageList;
}
Well... I tried to make duplex scan without AUTO_ADVANCE and got HRESULT: 0x8000FFFF (E_UNEXPECTED) on Transfer call. According to this post(even though that was on Windows 7) I guess there is no way to solve this for me by using WIA, still hope there will other suggestions...
Solved problem
I used saraff.twain and it worked for me:
- git page :https://github.com/saraff-9EB1047A4BEB4cef8506B29BA325BD5A/Saraff.Twain.NET
A good library with a great wiki page. (There is also a similar library for .NET 4.6.1.)
I am making a scanning component, but when I set a high resolution for the document (600 dpi), I tend to get System.OutOfMemoryException after just 1 or 2 scans.
My code is as follows
/// <summary>
/// Builds a ScannedImage holding both the full-resolution bitmap and an 800-pixel-wide
/// preview, each converted from the natively transferred scan data.
/// </summary>
/// <param name="e">Transfer event carrying the native image stream and its dimensions.</param>
/// <returns>A ScannedImage wrapping the low-res preview and the full-res bitmap.</returns>
/// <exception cref="IOException">Thrown when <paramref name="e"/> is null.
/// NOTE(review): ArgumentNullException would be more conventional here.</exception>
public ScannedImage SaveScannedImage(DataTransferredEventArgs e)
{
    if (e == null) throw new IOException();

    // Full-resolution conversion uses the dimensions reported by the scanner.
    BitmapSource fullRes;
    using (var nativeStream = e.GetNativeImageStream())
    {
        fullRes = nativeStream.ConvertToWpfBitmap(e.ImageInfo.ImageWidth, e.ImageInfo.ImageLength);
    }

    // Preview: fixed 800px width, height derived automatically (0).
    BitmapSource preview;
    using (var nativeStream = e.GetNativeImageStream())
    {
        preview = nativeStream.ConvertToWpfBitmap(800, 0);
    }

    return new ScannedImage(preview, fullRes);
}
It is usually happening at the
using (var lowResImageStream = e.GetNativeImageStream())
Help would be much appreciated.
What you see may be caused by large object heap (LOH) fragmentation.
That is hard to avoid, but you can compact the LOH explicitely.
GCSettings.LargeObjectHeapCompactionMode =
GCLargeObjectHeapCompactionMode.CompactOnce;
GC.Collect();
Also, make sure you run as a 64 bit process. Turn of the "Prefer 32 bit" option if it is on.
For more information, you might want to read
https://msdn.microsoft.com/en-us/library/system.runtime.gcsettings.largeobjectheapcompactionmode.aspx
https://blogs.msdn.microsoft.com/ericlippert/2009/06/08/out-of-memory-does-not-refer-to-physical-memory/
While running some test code in OpenCL (using Cloo C#), I started getting these OutOfResource errors from OpenCL and sometimes Unity just crashes entirely before I get an exception. I am basically re-calling a kernel function over and over with varying number of global/local work items to check timing. I leave the arguments the same and call the kernel starting with 2x2x2 global and 2x2x2 local and iterating uperwards checking only valid sizes. It works fine occasionally, but most of the time it completes about 30 or 40 Execute() calls and then crashes on the next Execute() call.
Note: Execute refers to the OpenCL.dll on the computer. The stack trace Unity returns is NULL I assume because of the native code.
Anyone have any idea what could be causing this?
Note: This version of Cloo is Cloo-Unity from GitHub and I am using it in Unity. The equivalent OpenCL function being called when I get the error is clEnqueueNDRangeKernel(), but it is called Execute() in Cloo.
Code Sample:
// NOTE(review): fragment from an OpenCL (Cloo) kernel-profiling routine; p_inputs,
// inputs, profiles, package and Device are defined elsewhere in the enclosing type.
//Setup inputs one time...
foreach (var input in p_inputs)
{
// Register each kernel input and create a timing profile keyed by kernel function.
inputs.Add(input.Function, input);
profiles.Add(input.Function, new RunProfile(input.Function, input.Weight));
input.Input.Prepare(package[input.Function]);
}
//Profile...
DateTime start;
int g_state = 0; // which axis of the global size grows next (0..2, round-robin)
int l_state = 0; // which axis of the local size grows next (0..2, round-robin)
long[] g = new long[3] { 2, 2, 2 }; // global work size, starts at 2x2x2
long[] l = new long[3] { 2, 2, 2 }; // local work size, starts at 2x2x2
while(g[0] * g[1] * g[2] < Device.MaxWorkGroupSize)
{
l[0] = 2; l[1] = 2; l[2] = 2; l_state = 0; //Reset locals
bool proceed = true;
while(proceed)
{
// Keep growing the local size until it matches the global size on every axis.
proceed = (l[0] != g[0] || l[1] != g[1] || l[2] != g[2]);
if (CLUtilities.ValidateExecutionParameters(Device, g, l))
{
Debug.Log("Profiling Start: " + g.ToEnumeratedString() + " / " + l.ToEnumeratedString());
foreach (var profile in profiles)
{
// Time one kernel dispatch (wall clock, ms).
start = DateTime.Now;
//Exception here when on (g=6x4x4, l=6x4x4)
// NOTE(review): per the author's follow-up, the CL_OUT_OF_RESOURCES error was
// caused by not re-calling Kernel.SetArgument() before each Execute() call.
package.Execute(package[profile.Key], g, l);
package.Commands.Flush();
package.Commands.Finish();
float time = (float)(DateTime.Now - start).TotalMilliseconds;
profile.Value.AddRun(g, l, time);
}
Debug.Log("Profiling Ending: " + g.ToEnumeratedString() + " / " + l.ToEnumeratedString());
}
// Grow one local axis by 2, then advance to the next axis round-robin.
l[l_state] += 2;
l_state = (l_state == 2) ? 0 : l_state + 1;
}
// Grow one global axis by 2, then advance to the next axis round-robin.
g[g_state] += 2;
g_state = (g_state == 2) ? 0 : g_state + 1;
}
Sorry i cannot comment cause less than 50 rep. but which operating system do you use? gpu? driver?
i got similar problems caused by opencl.dll i used win10 and Nvidia (x64).
Also have a look on https://social.technet.microsoft.com/Forums/en-US/85680348-c2c4-40bc-9f39-9dcfeea331c0/windows-10-opencldll-error?forum=win10itprogeneral
It seems that there is/was a issue with the memory compression in win10.
My problem was caused by updating win7 to win10, without updating the nvidia drivers.
I just got back around to posting this, but the issue turned out be related to the fact that I didn't recall Kernel.SetArgument() each time I called the Execute() method. I originally did this because I was worried it would re-copy the buffer, but as it turns out the buffer copy doesn't occur in this method anyway (so the overhead was small anyway).
Does your nvidia graphics card for display?
If nvidia is main graphics card, you have to edit registry to turn off watchdog.
for windows 7
system/current/control/graphicsdriver
TdrLevel(DWORL) : 0
I have had an out-of-memory exception problem for 4 months. My client uses a webservice, and they want me to test it. In their webservice there is a function called upload. I tested that function with 1500 users uploading at the same time. I tried the garbage collection function (GC). With a 2 MB file there is no exception, but with an 8 MB file there is still an out-of-memory exception. I have tried many times and a lot of solutions, but it still happens. While the upload was ongoing I watched the memory of all the test computers, and memory did not run out. So I think the problem comes from the webservice and the server, but my client said I have to prove to them that the cause is in the webservice and server. It's driving me crazy. Do you have any solutions for this? Additionally, our client does not publish their code; I can only use the webservice's functions to test. Also, I have to use a VPS to connect to their webservice, and the network is rather slow when connecting to the VPS.
I have to make sure that my test script doesn't have any problem. Here is my test script to test upload function.
/// <summary>
/// Test-script helper: builds a PfgDbJob for the given file, validates it against the
/// client's web service and then uploads it; validation errors come back through
/// <paramref name="errorMessages"/>.
/// </summary>
// NOTE(review): the List parameters/locals appear without type arguments ("out List
// errorMessages", "new List()"); the generic arguments were most likely stripped when
// the code was pasted -- TODO restore (presumably List<string> / List<ManifestItem>).
public void UploadNewJob(string HalID, string fileUID, string jobUID, string fileName, out List errorMessages)
{
errorMessages = null;
try
{
int versionNumber;
int newVersionNumber;
string newRevisionTag;
datasyncservice.ErrorObject errorObj = new datasyncservice.ErrorObject();
// Populate the job from fixed test constants plus the caller-supplied identity.
PfgDbJob job = new PfgDbJob();
job.CompanyName = Constant.SEARCH_CN;
job.HalliburtonSalesOffice = Constant.SEARCH_SO;
job.HalliburtonOperationsLocation = Constant.SEARCH_OL;
job.UploadPersonHalId = HalID;
job.CheckOutState = Constant.CHECKOUT_STATE;
job.RevisionTag = Constant.NEW_REVISION_TAG;
var manifestItems = new List();
var newManifestItems = new List();
var manifestItem = new ManifestItem();
// An empty fileUID means a brand-new job/file pair; otherwise reuse the supplied GUIDs.
if (fileUID == "")
{
if (job.JobUid == Guid.Empty)
job.JobUid = Guid.NewGuid();
if (job.FileUid == Guid.Empty)
job.FileUid = Guid.NewGuid();
}
else
{
Guid JobUid = new Guid(jobUID);
job.JobUid = JobUid;
Guid fileUid = new Guid(fileUID);
job.FileUid = fileUid;
}
// Change the next line when we transfer .ssp files by parts
manifestItem.PartUid = job.FileUid;
job.JobFileName = fileName;
manifestItem.BinaryFileName = job.JobFileName;
manifestItem.FileUid = job.FileUid;
manifestItem.JobUid = job.JobUid;
manifestItem.PartName = string.Empty;
manifestItem.SequenceNumber = 0;
manifestItems.Add(manifestItem);
// Server-side validation; may replace the manifest list with a corrected one.
errorMessages = DataSyncService.Instance.ValidateForUploadPfgDbJobToDatabase(out newVersionNumber, out newRevisionTag, out errorObj, out newManifestItems, HalID, job, false);
if (manifestItems.Count == 0)
manifestItems = newManifestItems;
// Bail out on any real validation error; a single "NOT AN ERROR" entry is treated as success.
if (errorMessages.Count > 0)
{
if (errorMessages.Count > 1 || errorMessages[0].IndexOf("NOT AN ERROR") == -1)
{
return;
}
}
//upload new Job
Guid transferUid;
// Memory-diagnostic block: read the file, drop the reference and force a collection so
// GetTotalMemory can show the cost of the read. fileContents is otherwise unused here
// because the UploadFileInAJob call below is commented out.
long a= GC.GetTotalMemory(false);
byte[] fileContents = File.ReadAllBytes(fileName);
fileContents = null;
GC.Collect();
long b = GC.GetTotalMemory(false);
//Assert.Fail((b - a).ToString());
//errorMessages = DataSyncService.Instance.UploadFileInAJob(out transferUid, out errorObj, job.UploadPersonHalId, job, manifestItem, fileContents);
DataSyncService.Instance.UploadPfgDbJobToDatabase(out errorObj, out versionNumber, job.UploadPersonHalId, job, false, manifestItems);
}
catch (Exception ex)
{
// NOTE(review): converting every exception into Assert.Fail discards the exception
// type and stack trace; consider reporting ex.ToString() instead of ex.Message.
Assert.Fail("Error from Test Scripts: " + ex.Message);
}
}
Please review my test code. And if there is not any problem from my test code, I have to improve reason is not from my test code T_T
My guess would be that you hit the 2 GB object size limit of .NET (1500 * 8MB > 4GB).
You should consider to change to .NET 4.5 and use the large object mode - see here - the setting is called gcAllowVeryLargeObjects.
We are using Win32 calls to the GDI Printing API to print graphics.
I've noticed that on certain PCL print queues, we leak GDI memory when printing. As far as I can tell everything is being disposed of properly. Not being an admin at my company it's rather difficult for me to tell which drivers print queues are using. PCL6 drivers appear to be the main culprit. Other seemingly unrelated issues I've seen:
System.ComponentModel.Win32Exception: The data area passed to a system call is too small
System.ComponentModel.Win32Exception: The operation completed successfully
var hdc = GdiPrintWin32.CreateDC(IntPtr.Zero, printerName, IntPtr.Zero, IntPtr.Zero);
if (hdc == IntPtr.Zero)
throw GetError("Device context failed for {0}", printerName);
try
{
if (GdiPrintWin32.StartDoc(hdc, ref doc) <= 0)
throw GetError("Failed to start print job");
This logic is currently being done in a WCF hosted inside a Windows service. When I set the instance mode to single and hammer it, I only get it the first couple calls; when I set the instance mode to per call, it's a lot easier to reproduce.
I've also seen errors where basically no calls can go through to the service for about 20 minutes and requires a restart to fix. I notice that after stopping the service there are Windows Event logs to the effect of:
2 user registry handles leaked from \Registry\User...: Process 3792 (\Device\HarddiskVolume1\Windows\System32\rundll32.exe) has opened key \REGISTRY\USER... Process 3792 (\Device\HarddiskVolume1\Windows\System32\rundll32.exe) has opened key ...\Software\Hewlett-Packard\HP SSNP
Basically it's been nothing but a nightmare for us. Has anyone had any experience with anything like this?
The guts of the code we're using to print are:
// Fragment: prints a set of in-memory EMF pages to the named printer via raw GDI calls.
var doc = new Docinfo { DocName = printJobName };
// create a device-context for the target printer
var hdc = GdiPrintWin32.CreateDC(IntPtr.Zero, printerName, IntPtr.Zero, IntPtr.Zero);
if (hdc == IntPtr.Zero)
throw GetError("Device context failed for {0}", printerName);
try
{
if (GdiPrintWin32.StartDoc(hdc, ref doc) <= 0)
throw GetError("Failed to start print job");
foreach (PrintingMetafile metafile in pages)
{
var bytes = metafile.GetBytes();
// load the bytes to a memory location and get the pointer
var inMemRef = GdiPrintWin32.SetEnhMetaFileBits((uint)bytes.Length, bytes);
if (inMemRef == IntPtr.Zero)
throw GetError("Failed to create EMF in memory");
// NOTE(review): if StartPage, PlayEnhMetaFile or EndPage fails below, the throw skips
// DeleteEnhMetaFile and the EMF handle leaks -- a likely source of the reported GDI
// object leak; consider a try/finally around the per-page work.
// Get the pixel coordinates of the paper
var x = 0; // GdiPrintWin32.GetDeviceCaps(hdc, HORZSIZE);
var y = 0; // GdiPrintWin32.GetDeviceCaps(hdc, VERTSIZE);
var hres = GdiPrintWin32.GetDeviceCaps(hdc, HORZRES);
var vres = GdiPrintWin32.GetDeviceCaps(hdc, VERTRES);
var rect = new Rect { Left = x, Top = y, Right = hres, Bottom = vres };
if (GdiPrintWin32.StartPage(hdc) <= 0)
throw GetError("StartPage failed");
if (GdiPrintWin32.PlayEnhMetaFile(hdc, inMemRef, ref rect) == 0)
throw GetError("PlayEnhMetaFile failed");
if (GdiPrintWin32.EndPage(hdc) <= 0)
throw GetError("EndPage failed");
// Release the in-memory metafile handle (happy path only; see NOTE above).
if (inMemRef != IntPtr.Zero)
{
GdiPrintWin32.DeleteEnhMetaFile(inMemRef);
}
}
if (GdiPrintWin32.EndDoc(hdc) <= 0)
throw GetError("Failed to finish print job");
}
finally
{
// The printer DC is always released, even when a page fails.
GdiPrintWin32.DeleteDC(hdc);
}