I have an application that recursively walks a very large (6 TB) folder. To speed things up, I create a new thread for each recursion. At one point my thread count was in excess of 12,000. As the task gets closer to completion, my internal thread count drops, but in Task Manager the thread count keeps climbing. I think that indicates that the threads are not being garbage collected when they finish.
At one point, my internal thread count showed 5575 threads while the Windows resource monitor showed the task using 33,023 threads.
static void Main(string[] args)
{
string folderName = Properties.Settings.Default.rootFolder;
ParameterizedThreadStart needleThreader = new ParameterizedThreadStart(needle);
Thread eye = new Thread(needleThreader);
threadcount = 1;
eye.Start(folderName);
}
static void needle(object objFolderName)
{
string folderName = (string)objFolderName;
FolderData folderData = getFolderData(folderName);
addToDB(folderData);
//since the above statement gets executed (my database table
//gets populated), I think the thread should get garbage collected
//here, but the windows thread count keeps climbing.
}
// recursive routine to walk directory structure and create annotated treeview
private static FolderData getFolderData(string folderName)
{
//Console.WriteLine(folderName);
long folderSize = 0;
string[] directories = new string[] { };
string[] files = new string[] { };
try
{
directories = Directory.GetDirectories(folderName);
}
catch { };
try
{
files = Directory.GetFiles(folderName);
}
catch { }
for (int f = 0; f < files.Length; f++)
{
try
{
folderSize += new FileInfo(files[f]).Length;
}
catch { } //cannot access file so skip;
}
FolderData folderData = new FolderData(folderName, directories.Length, files.Length, folderSize);
List<String> directoryList = directories.ToList<String>();
directoryList.Sort();
for (int d = 0; d < directoryList.Count; d++)
{
Console.Write(" " + threadcount + " ");
//threadcount is my internal counter. It increments here,
//where I start a new thread, and decrements when the thread ends
//see below
threadcount++;
ParameterizedThreadStart needleThreader = new ParameterizedThreadStart(needle);
Thread eye = new Thread(needleThreader);
eye.Start(directoryList[d]);
}
//thread is finished, so decrement
threadcount--;
return folderData;
}
Thanks to matt-dot-net's suggestion I spent a few hours researching the TPL (Task Parallel Library), and it was well worth it.
Here is my new code. It works blazingly fast, does not peg the CPU (it uses 41%, which is a lot, but still plays nice in the sandbox), uses only about 160 MB of memory (instead of nearly all of the 4 GB available) and uses a maximum of about 70 threads.
You'd almost think I knew what I was doing. But the .NET TPL handles all the hard stuff, like determining the correct number of threads and making sure they clean up after themselves.
class Program
{
static object padlock = new object();
static void Main(string[] args)
{
OracleConnection ora = new OracleConnection(Properties.Settings.Default.ora);
ora.Open();
new OracleCommand("DELETE FROM SCRPT_APP.S_DRIVE_FOLDERS", ora).ExecuteNonQuery();
ora.Close();
string folderName = Properties.Settings.Default.rootFolder;
Task processRoot = new Task((value) =>
{
getFolderData(value);
}, folderName);
processRoot.Start();
//Wait is like Join; it waits for this asynchronous task to finish.
processRoot.Wait();
}
// recursive routine to walk directory structure and create annotated treeview
private static void getFolderData(object objFolderName)
{
string folderName = (string)objFolderName;
Console.WriteLine(folderName);
long folderSize = 0;
string[] directories = new string[] { };
string[] files = new string[] { };
try
{
directories = Directory.GetDirectories(folderName);
}
catch { };
try
{
files = Directory.GetFiles(folderName);
}
catch { }
for (int f = 0; f < files.Length; f++)
{
try
{
folderSize += new FileInfo(files[f]).Length;
}
catch { } //cannot access file so skip;
}
FolderData folderData = new FolderData(folderName, directories.Length, files.Length, folderSize);
List<String> directoryList = directories.ToList<String>();
directoryList.Sort();
//create a task for each subdirectory
List<Task> dirTasks = new List<Task>();
for (int d = 0; d < directoryList.Count; d++)
{
dirTasks.Add(new Task((value) =>
{
getFolderData(value);
}, directoryList[d]));
}
//start all tasks
foreach (Task task in dirTasks)
{
task.Start();
}
//wait for them to finish
Task.WaitAll(dirTasks.ToArray());
addToDB(folderData);
}
private static void addToDB(FolderData folderData)
{
lock (padlock)
{
OracleConnection ora = new OracleConnection(Properties.Settings.Default.ora);
ora.Open();
OracleCommand addFolderData = new OracleCommand(
"INSERT INTO FOLDERS " +
"(PATH, FOLDERS, FILES, SPACE_USED) " +
"VALUES " +
"(:PATH, :FOLDERS, :FILES, :SPACE_USED) ",
ora);
addFolderData.BindByName = true;
addFolderData.Parameters.Add(":PATH", OracleDbType.Varchar2);
addFolderData.Parameters.Add(":FOLDERS", OracleDbType.Int32);
addFolderData.Parameters.Add(":FILES", OracleDbType.Int32);
addFolderData.Parameters.Add(":SPACE_USED", OracleDbType.Int64);
addFolderData.Prepare();
addFolderData.Parameters[":PATH"].Value = folderData.FolderName;
addFolderData.Parameters[":FOLDERS"].Value = folderData.FolderCount;
addFolderData.Parameters[":FILES"].Value = folderData.FileCount;
addFolderData.Parameters[":SPACE_USED"].Value = folderData.Size;
addFolderData.ExecuteNonQuery();
ora.Close();
}
}
}
}
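An aside on this version: each Task.WaitAll blocks a thread-pool thread for its whole subtree, which is likely why the pool still grows to about 70 threads. A hedged async sketch of the same recursion (my adaptation, not the original poster's code; it assumes the same FolderData class and addToDB method as above, plus using System.IO, System.Linq and System.Threading.Tasks):
//async variant: Task.WhenAll releases the thread at each level
//instead of blocking it, so far fewer pool threads are needed
private static async Task getFolderDataAsync(string folderName)
{
    long folderSize = 0;
    string[] directories = new string[] { };
    string[] files = new string[] { };
    try { directories = Directory.GetDirectories(folderName); } catch { }
    try { files = Directory.GetFiles(folderName); } catch { }
    foreach (string file in files)
    {
        try { folderSize += new FileInfo(file).Length; } catch { } //cannot access file so skip
    }
    FolderData folderData = new FolderData(folderName, directories.Length, files.Length, folderSize);
    //one task per subdirectory, awaited without blocking this thread
    var dirTasks = directories.OrderBy(d => d)
        .Select(d => Task.Run(() => getFolderDataAsync(d)))
        .ToArray();
    await Task.WhenAll(dirTasks);
    addToDB(folderData);
}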
Related
I have a loop creating three tasks:
List<Task> tasks = new List<Task>();
foreach (DSDevice device in validdevices)
{
var task = Task.Run(() =>
{
var conf = PrepareModasConfig(device, alternativconfig);
//CHECK-Point1
string config = ModasDicToConfig(conf);
//CHECK-Point2
if (config != null)
{
//Do Stuff
}
else
{
//Do other Stuff
}
});
tasks.Add(task);
}
Task.WaitAll(tasks.ToArray());
it calls this method, where some data of a dictionary of a default-config gets overwritten:
private Dictionary<string, Dictionary<string, string>> PrepareModasConfig(DSDevice device, string alternativeconfig)
{
try
{
Dictionary<string, Dictionary<string, string>> config = new Dictionary<string, Dictionary<string, string>>(Project.project.ModasConfig.Config);
if (config.ContainsKey("[Main]"))
{
if (config["[Main]"].ContainsKey("DevName"))
{
config["[Main]"]["DevName"] = device.ID;
}
}
return config;
}
catch
{
return null;
}
}
and after that, it gets converted into a string with this method:
private string ModasDicToConfig(Dictionary<string, Dictionary<string, string>> dic)
{
string s = string.Empty;
try
{
foreach (string key in dic.Keys)
{
s = s + key + "\n";
foreach (string k in dic[key].Keys)
{
s = s + k + "=" + dic[key][k] + "\n";
}
s = s + "\n";
}
return s;
}
catch
{
return null;
}
}
But every Task gets the exact same string back.
At //CHECK-Point1 I check the dictionary for the changed value: correct value for each Task.
At //CHECK-Point2 I check the string: the same string in all 3 Tasks (they should of course be different).
Default-Dictionary looks like this: (shortened)
{
{"[Main]",
{"DevName", "Default"},
...
},
...
}
The resulting string look like that:
[Main]
DevName=003 <--This should be different (from Device.ID)
...
[...]
EDIT:
I moved the method calls outside the Task. Now I get the correct results. So I guess it has something to do with the Task?
List<Task> tasks = new List<Task>();
foreach (DSDevice device in validdevices)
{
var conf = PrepareModasConfig(device, alternativconfig);
//CHECK-Point1
string config = ModasDicToConfig(conf);
//CHECK-Point2
var task = Task.Run(() =>
{
if (config != null)
{
//Do Stuff
}
else
{
//Do other Stuff
}
});
tasks.Add(task);
}
Task.WaitAll(tasks.ToArray());
The problem isn't caused by tasks. The lambda passed to Task.Run captures the loop variable device, so when the tasks are executed, all of them will use the contents of that variable. The same problem would occur even without tasks, as this SO question shows. The following code would print 10 ten times:
List<Action> actions = new List<Action>();
for (int i = 0; i < 10; ++i )
actions.Add(()=>Console.WriteLine(i));
foreach (Action a in actions)
a();
------
10
10
10
10
10
10
10
10
10
10
If the question's code used an Action without Task.Run, it would still produce the same bad results.
One way to fix this is to copy the loop variable into a local variable and use only that in the lambda:
for (int i = 0; i < 10; ++i )
{
var ii=i;
actions.Add(()=>Console.WriteLine(ii));
}
The question's code can be fixed by copying the loop variable into a local variable inside the loop:
foreach (DSDevice dev in validdevices)
{
var device=dev;
var task = Task.Run(() =>
{
var conf = PrepareModasConfig(device, alternativconfig);
Another way is to use Parallel.ForEach to process all items in parallel, using all available cores, without creating tasks explicitly:
Parallel.ForEach(validdevices, device =>
{
    var conf = PrepareModasConfig(device, alternativconfig);
string config = ModasDicToConfig(conf);
...
});
Parallel.ForEach allows limiting the number of worker tasks through the MaxDegreeOfParallelism option. It's a blocking call because it uses the current thread to process data along with any worker tasks.
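For instance, a minimal sketch capping the workers (the limit of 4 is arbitrary; validdevices, alternativconfig and the two methods are the question's):
//at most 4 items run concurrently, counting the calling thread,
//which Parallel.ForEach also uses as a worker
var options = new ParallelOptions { MaxDegreeOfParallelism = 4 };
Parallel.ForEach(validdevices, options, device =>
{
    var conf = PrepareModasConfig(device, alternativconfig);
    string config = ModasDicToConfig(conf);
    //... handle config as in the question ...
});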
I'm trying to copy some photos from a remote path to local.
I check if the photo exists, then I write the product to a text file.
I'm a newbie with Parallel stuff.
I tried, but it takes several minutes with 6000 products and 12,000 photos.
private async void button4_ClickAsync(object sender, EventArgs e)
{
var context = new MyEntities();
var articoli = context.Articoli.AsNoTracking()
.Where(...)
.OrderByDescending(...)
.ThenBy(...)
.ToList();
var paths = new StringBuilder();
int count = 0;
int countFotoSmall = 0;
Parallel.ForEach(articoli, (articolo) =>
{
count++;
if (FotoExists($"{articolo.Codart}Small.jpg"))
{
paths.AppendLine(@"""/assets/items/" + articolo.Codart + @"Small.jpg"",");
countFotoSmall++;
}
});
using (var file = new StreamWriter(@"C:.../fotoSmallSW.txt"))
{
await file.WriteAsync(paths.ToString());
}
MessageBox.Show($"End successfully: articoli {count}, items Small.jpg {countFotoSmall}");
}
private bool FotoExists(string fotoSmall)
{
var fotoWeb = Directory.EnumerateFiles(@"\\remote path...");
bool exists = false;
Parallel.ForEach(fotoWeb, foto =>
{
if (foto.Substring(foto.LastIndexOf("\\") + 1) == fotoSmall)
exists = true;
});
return exists;
}
What am I doing wrong? How can I improve the time complexity?
Ciao, your code has many threading errors. Let's talk about shared resources:
var paths = new StringBuilder(); //this is a shared resource
int count = 0; //this is a shared resource
int countFotoSmall = 0; //this is a shared resource
Parallel.ForEach(articoli, (articolo) =>
{
count++;
if (FotoExists($"{articolo.Codart}Small.jpg"))
{
paths.AppendLine(@"""/assets/items/" + articolo.Codart + @"Small.jpg"",");
countFotoSmall++;
}
});
All the variables declared outside the Parallel body are shared. This means that if you want to use them inside Parallel you need a lock statement:
var paths = new StringBuilder();
object lockpaths = new object();
int count = 0;
object lockcount = new object();
int countFotoSmall = 0;
object lockcountFotoSmall = new object();
Parallel.ForEach(articoli, (articolo) =>
{
lock (lockcount) count++;
if (FotoExists($"{articolo.Codart}Small.jpg"))
{
lock (lockpaths) paths.AppendLine(@"""/assets/items/" + articolo.Codart + @"Small.jpg"",");
lock (lockcountFotoSmall) countFotoSmall++;
}
});
lock means that only one thread at a time can take a shared variable and use it. Otherwise the result could be totally unpredictable!
As you can see, your Parallel is quite useless because almost all the code must be done inside a lock (that is, sequentially). My opinion is to remove this Parallel and use a normal for loop.
Also this one:
Parallel.ForEach(fotoWeb, foto =>
{
if (foto.Substring(foto.LastIndexOf("\\") + 1) == fotoSmall)
exists = true; //shared variable. Not good...
});
is not good. My opinion is to remove this Parallel as well.
So how do you increase performance? Try to modify your code like this:
private async void button4_ClickAsync(object sender, EventArgs e)
{
var context = new MyEntities();
var articoli = context.Articoli.AsNoTracking()
.Where(...)
.OrderByDescending(...)
.ThenBy(...)
.ToList();
var paths = new StringBuilder();
int count = 0;
int countFotoSmall = 0;
var fotoWeb = Directory.EnumerateFiles(@"\\remote path...");
foreach(var articolo in articoli) {
count++;
if (FotoExists($"{articolo.Codart}Small.jpg", fotoWeb)) {
paths.AppendLine(@"""/assets/items/" + articolo.Codart + @"Small.jpg"",");
countFotoSmall++;
}
}
using (var file = new StreamWriter(@"C:.../fotoSmallSW.txt"))
{
await file.WriteAsync(paths.ToString());
}
MessageBox.Show($"End successfully: articoli {count}, items Small.jpg {countFotoSmall}");
}
private bool FotoExists(string fotoSmall, IEnumerable<string> fotoWeb)
{
bool exists = false;
foreach(string foto in fotoWeb) {
if (foto.Substring(foto.LastIndexOf("\\") + 1) == fotoSmall){
exists = true;
break;
}
}
return exists;
}
What have I done differently from you? Three things:
Calculate fotoWeb once instead of on every iteration;
Use break; in the loop to exit as soon as exists is set to true;
Remove the Parallel statements.
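One more improvement beyond that: FotoExists still scans the whole fotoWeb listing for every product. A hedged sketch (assuming using System.Linq; the remote path is the question's placeholder) that builds a HashSet of the remote file names once, so each lookup becomes O(1):
//build the set of remote file names once (a single directory scan)
private static readonly HashSet<string> fotoWeb = new HashSet<string>(
    Directory.EnumerateFiles(@"\\remote path...").Select(Path.GetFileName),
    StringComparer.OrdinalIgnoreCase);

//each check is now a constant-time lookup instead of a full scan
private bool FotoExists(string fotoSmall)
{
    return fotoWeb.Contains(fotoSmall);
}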
Based on all the suggestions, I ended up with this code, and it works very well.
However, if any mistakes are found, please notify me.
Thanks everyone.
private async void button4_ClickAsync(object sender, EventArgs e)
{
var context = new MyEntities();
var articoli = context.Articoli.AsNoTracking()
.Where(...)
.OrderByDescending(...)
.ToList();
var paths = new StringBuilder();
int count = 0;
int countFotoSmall = 0;
Parallel.ForEach(articoli, (articolo) =>
{
Interlocked.Increment(ref count);
if (FotoExists($"{articolo.Codart}Small.jpg"))
{
lock (paths)
{
paths.AppendLine(@"""/assets/items/" + articolo.Codart + @"Small.jpg"",");
}
Interlocked.Increment(ref countFotoSmall);
}
});
using (var file = new StreamWriter(@"C:\...\fotoSmallSW.txt"))
{
await file.WriteAsync(paths.ToString());
}
MessageBox.Show($"End successfully: articoli {count}, items Small.jpg {countFotoSmall}");
}
private bool FotoExists(string fotoSmall)
{
if (File.Exists(@"\\path\" + fotoSmall))
return true;
return false;
}
I have a simple program that just reads an XPS file. I've read the following post, and it did solve part of the issue.
Opening XPS document in .Net causes a memory leak
class Program
{
static int intCounter = 0;
static object _intLock = new object();
static int getInt()
{
lock (_intLock)
{
return intCounter++;
}
}
static void Main(string[] args)
{
Console.ReadLine();
for (int i = 0; i < 100; i++)
{
Thread t = new Thread(() =>
{
var ogXps = File.ReadAllBytes(@"C:\Users\Nathan\Desktop\Objective.xps");
readXps(ogXps);
Console.WriteLine(getInt().ToString());
});
t.SetApartmentState(ApartmentState.STA);
t.Start();
Thread.Sleep(50);
}
Console.ReadLine();
}
static void readXps(byte[] originalXPS)
{
try
{
MemoryStream inputStream = new MemoryStream(originalXPS);
string memoryStreamUri = "memorystream://" + Path.GetFileName(Guid.NewGuid().ToString() + ".xps");
Uri packageUri = new Uri(memoryStreamUri);
Package oldPackage = Package.Open(inputStream);
PackageStore.AddPackage(packageUri, oldPackage);
XpsDocument xpsOld = new XpsDocument(oldPackage, CompressionOption.Normal, memoryStreamUri);
FixedDocumentSequence seqOld = xpsOld.GetFixedDocumentSequence();
//The following did solve some of the memory issue
//-----------------------------------------------
var docPager = seqOld.DocumentPaginator;
docPager.ComputePageCount();
for (int i = 0; i < docPager.PageCount; i++)
{
FixedPage fp = docPager.GetPage(i).Visual as FixedPage;
fp.UpdateLayout();
}
seqOld = null;
//-----------------------------------------------
xpsOld.Close();
oldPackage.Close();
oldPackage = null;
inputStream.Close();
inputStream.Dispose();
inputStream = null;
PackageStore.RemovePackage(packageUri);
}
catch (Exception e)
{
}
}
}
The program will read the XPS file a hundred times. [Screenshots: memory profiler results before and after applying the fix.]
So the fix the post suggested did eliminate some objects. However, I found that objects like Dispatcher, ContextLayoutManager and MediaContext still exist in memory, and their count is exactly 100. Is this normal behavior or a memory leak? How do I fix this? Thanks.
25/7/2018 Update
Adding the line Dispatcher.CurrentDispatcher.InvokeShutdown(); did get rid of the Dispatcher, ContextLayoutManager and MediaContext objects; I don't know if this is an ideal way to fix it.
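For reference, a sketch of where that call would go (using System.Windows.Threading), assuming each STA thread gets its own Dispatcher the first time the XPS code touches WPF:
Thread t = new Thread(() =>
{
    var ogXps = File.ReadAllBytes(@"C:\Users\Nathan\Desktop\Objective.xps");
    readXps(ogXps);
    Console.WriteLine(getInt().ToString());
    //shut down this thread's Dispatcher so the ContextLayoutManager and
    //MediaContext tied to it can be collected after the thread exits
    Dispatcher.CurrentDispatcher.InvokeShutdown();
});
t.SetApartmentState(ApartmentState.STA);
t.Start();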
It looks like the classes you're left with come from the XpsDocument, which implements IDisposable, but you never call Dispose. A few more of the classes you use implement that same interface; as a rule of thumb, either wrap them in a using statement so their Dispose method is guaranteed to get called, or call their Dispose method yourself.
An improved version of your readXps method will look like this:
static void readXps(byte[] originalXPS)
{
try
{
using (MemoryStream inputStream = new MemoryStream(originalXPS))
{
string memoryStreamUri = "memorystream://" + Path.GetFileName(Guid.NewGuid().ToString() + ".xps");
Uri packageUri = new Uri(memoryStreamUri);
using(Package oldPackage = Package.Open(inputStream))
{
PackageStore.AddPackage(packageUri, oldPackage);
using(XpsDocument xpsOld = new XpsDocument(oldPackage, CompressionOption.Normal, memoryStreamUri))
{
FixedDocumentSequence seqOld = xpsOld.GetFixedDocumentSequence();
//The following did solve some of the memory issue
//-----------------------------------------------
var docPager = seqOld.DocumentPaginator;
docPager.ComputePageCount();
for (int i = 0; i < docPager.PageCount; i++)
{
FixedPage fp = docPager.GetPage(i).Visual as FixedPage;
fp.UpdateLayout();
}
seqOld = null;
//-----------------------------------------------
} // disposes XpsDocument
} // dispose Package
PackageStore.RemovePackage(packageUri);
} // dispose MemoryStream
}
catch (Exception e)
{
// really do something here, at least:
Debug.WriteLine(e);
}
}
This should at least clean-up most of the objects. I'm not sure if you're going to see the effects in your profiling as that depends on if the objects are actually collected during your analysis. Profiling a debug build might give unanticipated results.
As the remainder of those object instances seems to be bound to the System.Windows.Threading.Dispatcher, I suggest you keep a reference to your threads (though at this point you might consider looking into Tasks) and, once all threads are done, call the static ExitAllFrames on the Dispatcher.
Your main method will then look like this:
Console.ReadLine();
Thread[] all = new Thread[100];
for (int i = 0; i < all.Length; i++)
{
var t = new Thread(() =>
{
var ogXps = File.ReadAllBytes(@"C:\Users\Nathan\Desktop\Objective.xps");
readXps(ogXps);
Console.WriteLine(getInt().ToString());
});
t.SetApartmentState(ApartmentState.STA);
t.Start();
all[i] = t; // keep reference
Thread.Sleep(50);
}
foreach(var t in all) t.Join(); // https://stackoverflow.com/questions/263116/c-waiting-for-all-threads-to-complete
all = null; // meh
Dispatcher.ExitAllFrames(); // https://stackoverflow.com/a/41953265/578411
Console.ReadLine();
I would like to seek your help in implementing Multi-Threading in my C# program.
The program aims to upload 10,000+ files to an FTP server. I am planning to implement a minimum of 10 threads to increase the speed of the process.
With this, this is the line of code that I have:
I have initialized 10 Threads:
public ThreadStart[] threadstart = new ThreadStart[10];
public Thread[] thread = new Thread[10];
My plan is to assign one file to a thread, as follows:
file 1 > thread 1
file 2 > thread 2
file 3 > thread 3
.
.
.
file 10 > thread 10
file 11 > thread 1
.
.
.
And so I have the following:
foreach (string file in files)
{
loop++;
threadstart[loop] = new ThreadStart(() => ftp.uploadToFTP(uploadPath + @"/" + Path.GetFileName(file), file));
thread[loop] = new Thread(threadstart[loop]);
thread[loop].Start();
if (loop == 9)
{
loop = 0;
}
}
The passing of files to their respective threads is working. My problem is that the starting of the threads overlaps.
For example, while Thread 1 is still running, a new file gets assigned to it. That throws an error, since Thread 1 is not yet done when a new ThreadStart is assigned to its slot. The same happens with the other threads.
What is the best way to implement this?
Any feedback will be greatly appreciated. Thank you! :)
Using async-await and just pass an array of files into it:
private static async void TestFtpAsync(string userName, string password, string ftpBaseUri,
IEnumerable<string> fileNames)
{
var tasks = new List<Task<byte[]>>();
var clients = new List<WebClient>();
foreach (var fileInfo in fileNames.Select(fileName => new FileInfo(fileName)))
{
    //do not wrap the WebClient in a using block here: disposing it
    //before its upload task completes would cancel the pending transfer
    var webClient = new WebClient();
    clients.Add(webClient);
    webClient.Credentials = new NetworkCredential(userName, password);
    tasks.Add(webClient.UploadFileTaskAsync(ftpBaseUri + fileInfo.Name, fileInfo.FullName));
}
Console.WriteLine("Uploading...");
foreach (var task in tasks)
{
try
{
await task;
Console.WriteLine("Success");
}
catch (Exception ex)
{
Console.WriteLine(ex.ToString());
}
    }
    //dispose the clients only now that every upload has finished
    foreach (var webClient in clients)
    {
        webClient.Dispose();
    }
}
Then call it like this:
const string userName = "username";
const string password = "password";
const string ftpBaseUri = "ftp://192.168.1.1/";
var fileNames = new[] { @"d:\file0.txt", @"d:\file1.txt", @"d:\file2.txt" };
TestFtpAsync(userName, password, ftpBaseUri, fileNames);
Why do it the hard way?
.net already has a class called ThreadPool.
You can just use that and it manages the threads itself.
Your code will be like this:
static void DoSomething(object n)
{
Console.WriteLine(n);
Thread.Sleep(10);
}
static void Main(string[] args)
{
ThreadPool.SetMaxThreads(20, 10);
for (int x = 0; x < 30; x++)
{
ThreadPool.QueueUserWorkItem(new WaitCallback(DoSomething), x);
}
Console.Read();
}
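If you want a hard cap of ten concurrent uploads regardless of process-wide pool settings, another hedged option is a SemaphoreSlim throttle; this sketch reuses the question's ftp.uploadToFTP, uploadPath and files names and assumes using System.Linq:
var throttle = new SemaphoreSlim(10);
var tasks = files.Select(file => Task.Run(async () =>
{
    await throttle.WaitAsync(); //admits at most 10 uploads at a time
    try
    {
        ftp.uploadToFTP(uploadPath + "/" + Path.GetFileName(file), file);
    }
    finally
    {
        throttle.Release(); //frees a slot for the next file
    }
})).ToArray();
Task.WaitAll(tasks);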
Let me rephrase.
I have a method that generates strings (paths) when given a start string (path).
If those paths are for a directory, I want to enqueue them back into the input of the method.
After processing a path synchronously, I want to take the data and clone it asynchronously into multiple paths of a pipeline, where each path needs to get the data block. So the BroadcastBlock is out of the question (it can't send a blocking signal to the blocks before itself).
The JoinBlock joining the results is relatively straightforward.
So, to sum up:
Is there a block in TPL Dataflow where I can access the input queue from the delegate? If so, how?
Is there a construct that acts like the BroadcastBlock but can block the blocks that come before it? (See the sketch after the link at the very end.)
Here is what I tried, with the help of almighty Google:
class subversion
{
private static string repo;
private static string user;
private static string pw;
private static DateTime start;
private static DateTime end;
private static List<parserObject> output;
public static List<parserObject> svnOutputList
{
get {return output; }
}
private static List<string> extension_whitelist;
public async void run(string link, string i_user, string i_pw, DateTime i_start, DateTime i_end)
{
repo = link;
user = i_user;
pw = i_pw;
start = i_start;
end = i_end;
output = new List<parserObject>();
BufferBlock<string> crawler_que = new BufferBlock<string>();
BufferBlock<svnFile> parser_que = new BufferBlock<svnFile>();
var svn = crawl(crawler_que, parser_que);
var broadcaster = new ActionBlock<svnFile>(async file =>
{ //tried to adapt the code from the guaranteed-delivery BroadcastBlock answer -> see link below
List<Task> todo = new List<Task>();
todo.Add(mLoc);//error cannot convert methodgroup to task
foreach (var task in todo)//error: Only assignment, call, increment, decrement, await, and new object expressions can be used as a statement?
{
task.SendAsync(file);//error cannot convert task to targetblock
}
await Task.WhenAll(todo.ToArray());
});
parser_que.LinkTo(broadcaster);
await Task.WhenAll(broadcaster, svn);//error cannot convert actionblock to task
}
private static async Task crawl(BufferBlock<string> in_queue, BufferBlock<svnFile> out_queue)
{
SvnClient client = new SvnClient();
client.Authentication.ForceCredentials(user, pw);
SvnListArgs arg = new SvnListArgs
{
Depth = SvnDepth.Children,
RetrieveEntries = SvnDirEntryItems.AllFieldsV15
};
while (await in_queue.OutputAvailableAsync())
{
string buffer_author = null;
string prev_author = null;
System.Collections.ObjectModel.Collection<SvnListEventArgs> contents;
string link = await in_queue.ReceiveAsync();
if (client.GetList(new Uri(link), arg, out contents))
{
foreach (SvnListEventArgs item in contents)
{
if (item.Entry.NodeKind == SvnNodeKind.Directory)
{
in_queue.Post(item.Path);
}
else if (item.Entry.NodeKind == SvnNodeKind.File)
{
try
{
int length = item.Name.LastIndexOf(".");
if (length <= 0)
{
continue;
}
string ext = item.Name.Substring(length);
if (extension_whitelist.Contains(ext))
{
Uri target = new Uri((repo + link));
SvnRevisionRange range;
SvnBlameArgs args = new SvnBlameArgs
{
Start = start.AddDays(-1),
End = end
};
try
{
svnFile file_instance = new svnFile();
client.Blame(target, args, delegate(object sender3, SvnBlameEventArgs e)
{
if (e.Author != null)
{
buffer_author = e.Author;
prev_author = e.Author;
}
else
{
buffer_author = prev_author;
}
file_instance.lines.Add(new svnLine(buffer_author, e.Line));
});
out_queue.Post(file_instance);
}
catch (Exception a) { Console.WriteLine("exception:" + a.Message);}
}
}
catch (Exception a)
{
}
}
}
}
}
}
private static async Task mLoc(svnFile file)
{
List<parserPart> parts = new List<parserPart>();
int find;
foreach (svnLine line in file.lines)
{
if ((find = parts.FindIndex(x => x.uploader_id == line.author)) > 0)
{
parts[find].count += 1;
}
else
{
parts.Add(new parserPart(line.author));
}
find = 0;
}
parserObject ret = new parserObject(parts, "mLoc");
await output.Add(ret);
return;
}
}
broadcastblock answer: Alternate to Dataflow BroadcastBlock with guaranteed delivery
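For completeness, a minimal sketch of the guaranteed-delivery idea from that link (my adaptation, assuming using System.Threading.Tasks.Dataflow; the two targets stand in for mLoc-style analyses). An ActionBlock awaits SendAsync into every bounded target before taking its next message, so a slow target blocks the stages before it, unlike BroadcastBlock, which overwrites undelivered items:
//bounded targets are what create the backpressure
var target1 = new ActionBlock<svnFile>(file => { /* analysis 1, e.g. mLoc */ },
    new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
var target2 = new ActionBlock<svnFile>(file => { /* analysis 2 */ },
    new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
//"broadcaster": every target must accept the item before the next one is taken
var broadcaster = new ActionBlock<svnFile>(async file =>
{
    await Task.WhenAll(target1.SendAsync(file), target2.SendAsync(file));
}, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });
parser_que.LinkTo(broadcaster);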