Is there a way to get the path of system executables like "wininit.exe" from a process ID? The code below doesn't work for them, and Process.GetProcesses() doesn't return anything useful either.
P.S. I'm writing my own task manager, tailored to my needs.
private static string GetMainModuleFilepath(int processId)
{
string wmiQueryString = "SELECT ProcessId, ExecutablePath FROM Win32_Process WHERE ProcessId = " + processId;
using (var searcher = new ManagementObjectSearcher(wmiQueryString))
{
using (var results = searcher.Get())
{
ManagementObject mo = results.Cast<ManagementObject>().FirstOrDefault();
if (mo != null)
{
return (string)mo["ExecutablePath"];
}
}
}
return null;
}
You can use the Process.GetProcessById method and pass in the ProcessId.
Then you can use the MainModule.FileName property on the ProcessModule.
My full code is below (I did this in a console app for quicker writing):
static void Main(string[] args)
{
while (true)
{
Console.WriteLine("Enter Process ID:");
var processIdString = Console.ReadLine();
var parsed = int.TryParse(processIdString, out var procId);
if (parsed)
{
var path = GetMainModuleFilepath(procId);
Console.WriteLine($"Found Path: {path}");
}
else
{
Console.WriteLine("Process Id must be a number!");
}
}
}
private static string GetMainModuleFilepath(int processId)
{
try
{
// GetProcessById throws ArgumentException when no process with that id exists,
// and MainModule throws Win32Exception for processes we are not allowed to inspect.
var process = Process.GetProcessById(processId);
return process.MainModule?.FileName;
}
catch (Exception)
{
return string.Empty;
}
}
Running this and entering a process ID prints the path of that process's executable.
Note:
If you are running this code in a 32-bit application, you won't be able to access the paths of 64-bit processes, so you'd have to compile and run your app as a 64-bit application (Project Properties → Build → Platform Target → x64).
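If you cannot retarget to x64, one workaround worth trying is the Win32 QueryFullProcessImageName API via P/Invoke; PROCESS_QUERY_LIMITED_INFORMATION is usually enough even for protected system processes such as wininit.exe. A minimal sketch (the class and method names here are my own, not from the answer above):
using System;
using System.Runtime.InteropServices;
using System.Text;

static class NativeProcessPath
{
    private const int PROCESS_QUERY_LIMITED_INFORMATION = 0x1000;

    [DllImport("kernel32.dll", SetLastError = true)]
    private static extern IntPtr OpenProcess(int dwDesiredAccess, bool bInheritHandle, int dwProcessId);

    [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
    private static extern bool QueryFullProcessImageName(IntPtr hProcess, int dwFlags, StringBuilder lpExeName, ref int lpdwSize);

    [DllImport("kernel32.dll", SetLastError = true)]
    private static extern bool CloseHandle(IntPtr hObject);

    public static string GetProcessPath(int processId)
    {
        // PROCESS_QUERY_LIMITED_INFORMATION is granted for most system processes
        // where PROCESS_QUERY_INFORMATION would be denied.
        var handle = OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, false, processId);
        if (handle == IntPtr.Zero)
            return null; // access denied, or the process has already exited

        try
        {
            var buffer = new StringBuilder(1024);
            int size = buffer.Capacity;
            // dwFlags = 0 returns the Win32 path format, e.g. C:\Windows\System32\wininit.exe
            return QueryFullProcessImageName(handle, 0, buffer, ref size) ? buffer.ToString() : null;
        }
        finally
        {
            CloseHandle(handle);
        }
    }
}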
I'm using the following code to get disk performance values from a Windows PC.
using System;
using System.Management;
namespace DiskPerformance
{
class Program
{
#region Constants
private const string QUERY = "select * from Win32_PerfFormattedData_PerfDisk_LogicalDisk";
private const string NAME = "Name";
private const char COLON = ':';
private const string READ_PER_SECOND = "DiskReadsPerSec";
private const string WRITE_PER_SECOND = "DiskWritesPerSec";
#endregion
static void Main(string[] args)
{
try
{
var objMOS = new ManagementObjectSearcher(QUERY);
var collection = objMOS.Get();
foreach (ManagementObject service in collection)
{
if (!(service.Properties[NAME].Value is String name) ||
name.Length != 2 || name[1] != COLON)
{
continue;
}
name = name[0].ToString();
var diskReadsPerSec = ((UInt32)service.Properties[READ_PER_SECOND].Value);
var diskWritesPerSec = ((UInt32)service.Properties[WRITE_PER_SECOND].Value);
Console.WriteLine("Disk = {0}, Read per Sec = {1}, Write per Sec = {2}", name, diskReadsPerSec, diskWritesPerSec);
}
Console.Read();
}
catch (Exception)
{
// "throw;" rethrows without resetting the stack trace, unlike "throw ex;".
throw;
}
}
}
}
The above code works fine in most environments, but it throws the following error on Windows 8.1 Pro:
System.Management.ManagementException: Invalid query
I don't get any additional information from the stack trace either.
Stack trace:
System.Management.ManagementException.ThrowWithExtendedInfo(ManagementStatus errorCode)
System.Management.ManagementObjectCollection.ManagementObjectEnumerator.MoveNext()
Does anyone have any idea what the problem could be?
PS: I already have administrator privileges and the program is executed as admin.
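There is no accepted fix in this thread, but one hedged way to narrow the problem down is to check whether Win32_PerfFormattedData_PerfDisk_LogicalDisk is actually registered in root\cimv2 on the failing machine, since the formatted performance classes come from the WMI performance adapter and can be missing or broken there. A diagnostic sketch (this is a suggestion, not a confirmed solution):
using System;
using System.Management;

class ListPerfDiskClasses
{
    static void Main()
    {
        var scope = new ManagementScope(@"\\.\root\cimv2");
        scope.Connect();
        // A schema query ("meta_class") enumerates the class definitions in the namespace.
        var searcher = new ManagementObjectSearcher(scope, new ObjectQuery("SELECT * FROM meta_class"));
        foreach (ManagementBaseObject cls in searcher.Get())
        {
            var name = cls.ClassPath.ClassName;
            if (name.StartsWith("Win32_PerfFormattedData_PerfDisk", StringComparison.OrdinalIgnoreCase))
                Console.WriteLine(name); // the LogicalDisk class should appear here
        }
    }
}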
I'm looking to parse the WebCacheV01.dat file using C# to find the last file location for upload in an Internet browser.
%LocalAppData%\Microsoft\Windows\WebCache\WebCacheV01.dat
I'm using the ManagedEsent NuGet package:
Esent.Isam
Esent.Interop
When I try to run the code below, it fails at:
Api.JetGetDatabaseFileInfo(filePath, out pageSize, JET_DbInfo.PageSize);
Or if I use
Api.JetSetSystemParameter(instance, JET_SESID.Nil, JET_param.CircularLog, 1, null);
at
Api.JetAttachDatabase(sesid, filePath, AttachDatabaseGrbit.ReadOnly);
I get the following error:
An unhandled exception of type
'Microsoft.Isam.Esent.Interop.EsentFileAccessDeniedException' occurred
in Esent.Interop.dll
Additional information: Cannot access file, the file is locked or in use
string localAppDataPath = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData);
string filePathExtra = @"\Microsoft\Windows\WebCache\WebCacheV01.dat";
string filePath = string.Format("{0}{1}", localAppDataPath, filePathExtra);
JET_INSTANCE instance;
JET_SESID sesid;
JET_DBID dbid;
JET_TABLEID tableid;
String connect = "";
JET_SNP snp;
JET_SNT snt;
object data;
int numInstance = 0;
JET_INSTANCE_INFO [] instances;
int pageSize;
JET_COLUMNDEF columndef = new JET_COLUMNDEF();
JET_COLUMNID columnid;
Api.JetCreateInstance(out instance, "instance");
Api.JetGetDatabaseFileInfo(filePath, out pageSize, JET_DbInfo.PageSize);
Api.JetSetSystemParameter(JET_INSTANCE.Nil, JET_SESID.Nil, JET_param.DatabasePageSize, pageSize, null);
//Api.JetSetSystemParameter(instance, JET_SESID.Nil, JET_param.CircularLog, 1, null);
Api.JetInit(ref instance);
Api.JetBeginSession(instance, out sesid, null, null);
//Do stuff in db
Api.JetEndSession(sesid, EndSessionGrbit.None);
Api.JetTerm(instance);
Is it not possible to read this without making modifications?
Viewer
http://www.nirsoft.net/utils/ese_database_view.html
Python
https://jon.glass/attempts-to-parse-webcachev01-dat/
libesedb
impacket
Issue:
The file is probably in use.
Solution:
In order to free the locked file, stop the scheduled task \Microsoft\Windows\Wininet\CacheTask.
The Code
public override IEnumerable<string> GetBrowsingHistoryUrls(FileInfo fileInfo)
{
var fileName = fileInfo.FullName;
var results = new List<string>();
try
{
int pageSize;
Api.JetGetDatabaseFileInfo(fileName, out pageSize, JET_DbInfo.PageSize);
SystemParameters.DatabasePageSize = pageSize;
using (var instance = new Instance("Browsing History"))
{
var param = new InstanceParameters(instance);
param.Recovery = false;
instance.Init();
using (var session = new Session(instance))
{
Api.JetAttachDatabase(session, fileName, AttachDatabaseGrbit.ReadOnly);
JET_DBID dbid;
Api.JetOpenDatabase(session, fileName, null, out dbid, OpenDatabaseGrbit.ReadOnly);
using (var tableContainers = new Table(session, dbid, "Containers", OpenTableGrbit.ReadOnly))
{
IDictionary<string, JET_COLUMNID> containerColumns = Api.GetColumnDictionary(session, tableContainers);
if (Api.TryMoveFirst(session, tableContainers))
{
do
{
var retrieveColumnAsInt32 = Api.RetrieveColumnAsInt32(session, tableContainers, containerColumns["ContainerId"]);
if (retrieveColumnAsInt32 != null)
{
var containerId = (int)retrieveColumnAsInt32;
using (var table = new Table(session, dbid, "Container_" + containerId, OpenTableGrbit.ReadOnly))
{
var tableColumns = Api.GetColumnDictionary(session, table);
if (Api.TryMoveFirst(session, table))
{
do
{
var url = Api.RetrieveColumnAsString(
session,
table,
tableColumns["Url"],
Encoding.Unicode);
var downloadedFileName = Api.RetrieveColumnAsString(
session,
table,
columnIds2["Filename"]);
if(string.IsNullOrEmpty(downloadedFileName)) // check for download history only.
continue;
// Order by access Time to find the last uploaded file.
var accessedTime = Api.RetrieveColumnAsInt64(
session,
table,
columnIds2["AccessedTime"]);
var lastVisitTime = accessedTime.HasValue ? DateTime.FromFileTimeUtc(accessedTime.Value) : DateTime.MinValue;
results.Add(url);
}
while (Api.TryMoveNext(session, table.JetTableid));
}
}
}
} while (Api.TryMoveNext(session, tableContainers));
}
}
}
}
}
catch (Exception ex)
{
// log goes here....
}
return results;
}
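For reference, a minimal call site might look like the sketch below; historyReader stands for an instance of whatever class declares GetBrowsingHistoryUrls (a hypothetical name, not from the code above):
// Hypothetical usage; "historyReader" is an assumed instance of the declaring class.
var webCachePath = Path.Combine(
    Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
    @"Microsoft\Windows\WebCache\WebCacheV01.dat");
foreach (var url in historyReader.GetBrowsingHistoryUrls(new FileInfo(webCachePath)))
{
    Console.WriteLine(url);
}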
Utils
Task Scheduler Wrapper
You can use the Microsoft.Win32.TaskScheduler.TaskService wrapper to stop it from C#; just add the TaskScheduler NuGet package: https://taskscheduler.codeplex.com/
Usage
public static FileInfo CopyLockedFileRtl(DirectoryInfo directory, FileInfo fileInfo, string remoteEndPoint)
{
FileInfo copiedFileInfo = null;
using (var ts = new TaskService(string.Format(@"\\{0}", remoteEndPoint)))
{
var task = ts.GetTask(@"\Microsoft\Windows\Wininet\CacheTask");
task.Stop();
task.Enabled = false;
var byteArray = FileHelper.ReadOnlyAllBytes(fileInfo);
var filePath = Path.Combine(directory.FullName, "unlockedfile.dat");
File.WriteAllBytes(filePath, byteArray);
copiedFileInfo = new FileInfo(filePath);
task.Enabled = true;
task.Run();
task.Dispose();
}
return copiedFileInfo;
}
I was not able to get Adam's answer to work. What worked for me was making a copy with AlphaVSS (a .NET class library that has a managed API for the Volume Shadow Copy Service). The file was in "Dirty Shutdown" state, so I additionally wrote this to handle the exception it threw when I opened it:
catch (EsentErrorException ex)
{ // Usually after the database is copied, it's in Dirty Shutdown state
// This can be verified by running "esentutl.exe /Mh WebCacheV01.dat"
logger.Info(ex.Message);
switch (ex.Error)
{
case JET_err.SecondaryIndexCorrupted:
logger.Info("Secondary Index Corrupted detected, exiting...");
Api.JetTerm2(instance, TermGrbit.Complete);
return false;
case JET_err.DatabaseDirtyShutdown:
logger.Info("Dirty shutdown detected, attempting to recover...");
try
{
Api.JetTerm2(instance, TermGrbit.Complete);
Process.Start("esentutl.exe", "/p /o " + newPath);
Thread.Sleep(5000);
Api.JetInit(ref instance);
Api.JetBeginSession(instance, out sessionId, null, null);
Api.JetAttachDatabase(sessionId, newPath, AttachDatabaseGrbit.None);
}
catch (Exception e2)
{
logger.Info("Could not recover database " + newPath + ", will try opening it one last time. If that doesn't work, try using other esentutl commands", e2);
}
break;
}
}
I'm thinking about using the 'Recent Items' folder, since an entry is written there when you select a file to upload:
C:\Users\USER\AppData\Roaming\Microsoft\Windows\Recent
string recent = (Environment.GetFolderPath(Environment.SpecialFolder.Recent));
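A rough sketch of that idea, assuming the most recently written .lnk shortcut corresponds to the last file the user picked (resolving a shortcut to its target path would need extra work, e.g. the Windows Script Host COM API):
using System;
using System.IO;
using System.Linq;

// Enumerate the Recent Items folder and take the shortcut with the newest write time.
// These are .lnk files; this sketch does not resolve the shortcut targets.
string recent = Environment.GetFolderPath(Environment.SpecialFolder.Recent);
var lastUsed = new DirectoryInfo(recent)
    .EnumerateFiles("*.lnk")
    .OrderByDescending(f => f.LastWriteTimeUtc)
    .FirstOrDefault();
if (lastUsed != null)
    Console.WriteLine("Most recent shortcut: " + lastUsed.FullName);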
I am trying to use ClrMD to dump the stack traces of all threads running within a specific process. The code works fine in my development environment but not on the production server.
The server is running: Windows Server 2012 R2 Standard
The error I receive is:
Could not attach to process. Error 0.
This post asks how to attach ClrMD to another user's process, which is what I was trying to do. I terminated the process (which runs as a Windows service) and restarted it under the same user I am running ClrMD as. I still get the error.
I tried giving the user debugging privileges, but that didn't help either.
I suspect the problem has something to do with how the production server is configured. I have administrator rights.
Any suggestions on what to do next?
Code:
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Microsoft.Diagnostics.Runtime;
namespace ConsoleApplication4
{
class Program
{
static void Main(string[] args)
{
int pid = 0;
var result = new Dictionary<int, string[]>();
var targetProcessName = "Dist.TingbogScraper.Business.TingbogScraperService.vshost";
// Change this to the process you are looking for
var outputPath = "C:\\temp\\ClrMDresult.txt";
var exceptionOutput = "C:\\temp\\ClrMDdump.txt";
var processes = Process.GetProcesses();
foreach (var process in processes)
{
if (process.ProcessName.Contains(targetProcessName))
{
pid = process.Id;
}
}
try
{
using (var dataTarget = DataTarget.AttachToProcess(pid, 5000, AttachFlag.Passive))
{
ClrRuntime runtime = dataTarget.ClrVersions.First().CreateRuntime();
foreach (var t in runtime.Threads)
{
try
{
if (t.StackTrace != null)
{
result.Add(
t.ManagedThreadId,
t.StackTrace.Select(f =>
{
if (f.Method != null)
{
return f.Method.Type.Name + "." + f.Method.Name;
}
return null;
}).ToArray()
);
}
}
catch (Exception ex)
{
}
}
}
foreach (var kvp in result)
{
var value = kvp.Value;
foreach (var stacktrace in value)
{
System.IO.File.AppendAllText(outputPath,
string.Format("{0} {1} {2}", kvp.Key, stacktrace, Environment.NewLine));
}
}
}
catch (ClrDiagnosticsException ex)
{
System.IO.File.AppendAllText(outputPath,
string.Format("{0} {1} {2}", ex.Message, ex.StackTrace, ex.Source));
}
}
}
}
It turned out that the process name was different in my development environment than in production (the code was looking for the Visual Studio hosting process name, which ends in .vshost).
Correcting the process name fixed the error.
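A hedged way to make the lookup survive that difference is to match without the .vshost suffix that the Visual Studio hosting process appends in development, for example:
// Match the service both with and without the Visual Studio ".vshost" hosting suffix.
var baseName = "Dist.TingbogScraper.Business.TingbogScraperService";
var pid = Process.GetProcesses()
    .Where(p => p.ProcessName.StartsWith(baseName, StringComparison.OrdinalIgnoreCase))
    .Select(p => p.Id)
    .FirstOrDefault();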
I want to display folder and file sizes in a ListView placed on my form.
Is it possible to get the total folder size (including subfolders and files) and display it from a remote computer?
With the following code I can get file sizes correctly, but I am not getting real folder sizes at all; instead, every folder's size comes back as 0 KB.
ManagementScope ManagementScope1 = new ManagementScope(string.Format("\\\\{0}\\root\\cimv2", strIP), options);
ManagementScope1.Connect();
ObjectGetOptions objectGetOptions = new ObjectGetOptions();
ObjectQuery obq1 = new ObjectQuery("Associators of {Win32_Directory.Name='D:'} Where ResultRole = PartComponent ");
ManagementObjectSearcher searcher1 = new ManagementObjectSearcher(ManagementScope1, obq1);
foreach (ManagementObject ManagementObject2 in searcher1.Get())
{
lvData[0] = ManagementObject2["FileName"].ToString();
lvData[1] = formatSize(Convert.ToInt64(ManagementObject2["FileSize"]));
ListViewItem lvItem = new ListViewItem(lvData, 0);
lvFiles.Items.Add(lvItem);
}
formatSize() is as follows:
protected string formatSize(Int64 lSize)
{
//Format number to KB
string stringSize = "";
NumberFormatInfo myNfi = new NumberFormatInfo();
Int64 lKBSize = 0;
if (lSize < 1024)
{
if (lSize == 0)
{
//zero byte
stringSize = "0";
}
else
{
//less than 1K but not zero byte
stringSize = "1";
}
}
else
{
//convert to KB
lKBSize = lSize / 1024;
//format number with default format
stringSize = lKBSize.ToString("n", myNfi);
//remove decimal
stringSize = stringSize.Replace(".00", "");
}
return stringSize + " KB";
}
I also tried the approach from this link, but it fails with an "Object reference not set to an instance of an object" error when I use:
FolderSize += (UInt64)ManagementObject2["FileSize"];
lvData[1] = formatSize(Convert.ToInt64 (FolderSize));
Could someone kindly help me overcome this issue?
A folder has no size of its own; it is just a container for files, and the sizes of those files determine the folder size.
For the drive as a whole you can approximate the total by finding the disk size and the free space and subtracting the two.
On a background thread, scan the folders with CIM_Directory and CIM_DataFile. You can call the scan method with a ManagementScope and a drive letter (D:).
I ran it from a ThreadPool thread like so:
ManagementScope ManagementScope1 = new ManagementScope();
ManagementScope1.Connect();
ThreadPool.QueueUserWorkItem((que) => { scan(ManagementScope1, "D:"); });
Iterate over folders with WMI
private void scan(ManagementScope scope, string drive)
{
var disk = scope.Device(drive).GetEnumerator();
if (!disk.MoveNext())
{
Add(String.Format("{0} drive not found",drive),0);
return;
}
Add(drive, disk.Current.Size() - disk.Current.FreeSpace());
// iterate over root Folders
foreach (var folder in scope.Folder(drive))
{
ulong totalsize = 0;
try
{
// iterate over the files
foreach (var file in scope.File(
drive,
folder.Path(),
folder.FileName()))
{
totalsize += file.FileSize();
}
// iterate over all subfolders
foreach (var subfolder in scope.SubFolder(drive
, folder.Path()
, folder.FileName()))
{
// iterate over files within a folder
foreach (var file in scope.File(
drive,
subfolder.Path(),
subfolder.FileName()))
{
totalsize += file.FileSize();
}
}
}
catch (Exception exp)
{
Debug.WriteLine(exp.Message);
}
Add(folder.Name(), totalsize);
}
}
Extension methods
The original code became close to unmaintainable, so I implemented extension methods for ManagementScope and ManagementBaseObject.
public static class ManagementObjectExtensions
{
const string WQL_DEVICE = "Select Size,FreeSpace from Win32_LogicalDisk where Deviceid='{0}'";
const string WQL_FOLDER = "Select Path, Filename, Name from CIM_Directory where Drive='{0}' and path='\\\\' and system = false and hidden = false and readable = true";
const string WQL_SUBFOLDER = "Select Path, Filename from CIM_Directory where Drive='{0}' and path like '{1}{2}\\\\%' and system = false and hidden = false and readable = true";
const string WQL_FILE = "Select FileSize from CIM_DataFile where Drive='{0}' AND Path = '{1}{2}\\\\' ";
// internal helper to get an enumerable collection from any WQL
private static ManagementObjectCollection GetWqlEnumerator(this ManagementScope scope, string wql, params object[] args)
{
return new ManagementObjectSearcher(
scope,
new ObjectQuery(
String.Format(wql, args)))
.Get();
}
public static ManagementObjectCollection Device(this ManagementScope scope, params object[] args)
{
return scope.GetWqlEnumerator(WQL_DEVICE, args);
}
public static ManagementObjectCollection Folder(this ManagementScope scope, params object[] args)
{
return scope.GetWqlEnumerator(WQL_FOLDER, args);
}
public static ManagementObjectCollection SubFolder(this ManagementScope scope, params object[] args)
{
return scope.GetWqlEnumerator(WQL_SUBFOLDER, args);
}
public static ManagementObjectCollection File(this ManagementScope scope, params object[] args)
{
return scope.GetWqlEnumerator(WQL_FILE, args);
}
public static string Path(this ManagementBaseObject mo)
{
return mo["Path"].ToString().Replace("\\","\\\\");
}
public static string Name(this ManagementBaseObject mo)
{
return mo["Name"].ToString();
}
public static string FileName(this ManagementBaseObject mo)
{
return mo["FileName"].ToString();
}
public static ulong FreeSpace(this ManagementBaseObject mo)
{
return (ulong)mo["FreeSpace"];
}
public static ulong Size(this ManagementBaseObject mo)
{
return (ulong) mo["Size"];
}
public static ulong FileSize(this ManagementBaseObject mo)
{
return (ulong) mo["FileSize"];
}
}
Helper for adding items to the ListView
This little helper handles switching to the UI thread if needed
// UI Thread safe helper for adding an item
private void Add(string name, ulong size)
{
if (this.listView1.InvokeRequired)
{
this.listView1.Invoke(new MethodInvoker(() => Add(name, size)));
}
else
{
var lvi = new ListViewItem(name);
lvi.SubItems.Add(size.ToString());
this.listView1.Items.Add(lvi);
}
}
I looked up a path selection issue in this answer from RRUZ
The problem, as @rene pointed out, is that folders don't actually have a size. They are containers. To get the total size you would have to enumerate all the files in the directory and subdirectories to calculate it.
Excerpt from MSDN
FileSize
Data type: uint64
Access type: Read-only
Size of the file system object, in bytes. Although folders possess a FileSize property, the value 0 is always returned. To determine the size of a folder, use the FileSystemObject or add up the size of all the files stored in the folder.
For more information about using uint64 values in scripts, see Scripting in WMI.
The easiest way, arguably, is to use the DirectoryInfo class, UNC paths and some LINQ.
var folder = @"\\MachineOrIp\c$\Temp";
var directory = new DirectoryInfo(folder);
var totalSize = directory.EnumerateFiles("*.*", SearchOption.AllDirectories).Sum(file => file.Length);
Console.WriteLine("{0} - {1} Bytes", folder, totalSize);
This will let you get the "total size", in bytes, of a top-level folder.
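One caveat: EnumerateFiles with SearchOption.AllDirectories throws an UnauthorizedAccessException as soon as it hits a folder the account cannot read, which is common over administrative shares. A sketch of a manual recursion that simply skips such folders:
// Sum file sizes recursively, skipping directories that deny access.
static long GetDirectorySize(DirectoryInfo dir)
{
    long size = 0;
    try
    {
        size += dir.EnumerateFiles().Sum(file => file.Length);
        foreach (var sub in dir.EnumerateDirectories())
            size += GetDirectorySize(sub);
    }
    catch (UnauthorizedAccessException)
    {
        // No permission on this branch; ignore it and continue with the rest.
    }
    return size;
}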
Since installing the new Windows Azure SDK 2.3, I get a warning from csrun:
"DevStore interaction through CSRun has been deprecated. Use WAStorageEmulator.exe instead."
So there are two questions:
1) How to start the new storage emulator correctly from code?
2) How to determine from code if the storage emulator is already running?
I found the solution myself. Here is my C# code. The old code used for SDK 2.2 is commented out.
public static void StartStorageEmulator()
{
//var count = Process.GetProcessesByName("DSServiceLDB").Length;
//if (count == 0)
// ExecuteCSRun("/devstore:start");
var count = Process.GetProcessesByName("WAStorageEmulator").Length;
if (count == 0)
ExecuteWAStorageEmulator("start");
}
/*
private static void ExecuteCSRun(string argument)
{
var start = new ProcessStartInfo
{
Arguments = argument,
FileName = #"c:\Program Files\Microsoft SDKs\Windows Azure\Emulator\csrun.exe"
};
var exitCode = ExecuteProcess(start);
Assert.AreEqual(exitCode, 0, "Error {0} executing {1} {2}", exitCode, start.FileName, start.Arguments);
}
*/
private static void ExecuteWAStorageEmulator(string argument)
{
var start = new ProcessStartInfo
{
Arguments = argument,
FileName = #"c:\Program Files (x86)\Microsoft SDKs\Windows Azure\Storage Emulator\WAStorageEmulator.exe"
};
var exitCode = ExecuteProcess(start);
Assert.AreEqual(exitCode, 0, "Error {0} executing {1} {2}", exitCode, start.FileName, start.Arguments);
}
private static int ExecuteProcess(ProcessStartInfo start)
{
int exitCode;
using (var proc = new Process { StartInfo = start })
{
proc.Start();
proc.WaitForExit();
exitCode = proc.ExitCode;
}
return exitCode;
}
using System;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Xunit;
namespace UnitTests.Persistence
{
public class AzureStorageEmulatorManagerV3
{
private const string ProcessName = "WAStorageEmulator";
public static void StartStorageEmulator()
{
var count = Process.GetProcessesByName(ProcessName).Length;
if (count == 0)
ExecuteWAStorageEmulator("start");
}
public static void StopStorageEmulator()
{
Process process = GetWAstorageEmulatorProcess();
if (process != null)
{
process.Kill();
}
}
private static void ExecuteWAStorageEmulator(string argument)
{
var start = new ProcessStartInfo
{
Arguments = argument,
FileName = #"c:\Program Files (x86)\Microsoft SDKs\Windows Azure\Storage Emulator\WAStorageEmulator.exe"
};
var exitCode = ExecuteProcess(start);
if (exitCode != 0)
{
string message = string.Format(
"Error {0} executing {1} {2}",
exitCode,
start.FileName,
start.Arguments);
throw new InvalidOperationException(message);
}
}
private static int ExecuteProcess(ProcessStartInfo start)
{
int exitCode;
using (var proc = new Process { StartInfo = start })
{
proc.Start();
proc.WaitForExit();
exitCode = proc.ExitCode;
}
return exitCode;
}
public static Process GetWAstorageEmulatorProcess()
{
return Process.GetProcessesByName(ProcessName).FirstOrDefault();
}
[Fact]
public void StartingAndThenStoppingWAStorageEmulatorGoesOk()
{
// Arrange Start
AzureStorageEmulatorManagerV3.StartStorageEmulator();
// Act
Thread.Sleep(2000);
Process WAStorageEmulatorProcess = GetWAstorageEmulatorProcess();
// Assert
Assert.NotNull(WAStorageEmulatorProcess);
Assert.True(WAStorageEmulatorProcess.Responding);
// Arrange Stop
AzureStorageEmulatorManagerV3.StopStorageEmulator();
Thread.Sleep(2000);
// Act
WAStorageEmulatorProcess = GetWAstorageEmulatorProcess();
// Assert
Assert.Null(WAStorageEmulatorProcess);
}
}
}
See my answer here. It actually uses the WAStorageEmulator status API instead of simply relying on testing whether or not the process exists, as in @huha's own answer.
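For reference, a sketch of that status-based check; it assumes WAStorageEmulator.exe status prints an IsRunning line, so verify the exact output text against your emulator version:
// Hedged sketch: run "WAStorageEmulator.exe status" and inspect its output
// instead of checking for a process by name.
private static bool IsStorageEmulatorRunning()
{
    var start = new ProcessStartInfo
    {
        Arguments = "status",
        FileName = @"c:\Program Files (x86)\Microsoft SDKs\Windows Azure\Storage Emulator\WAStorageEmulator.exe",
        RedirectStandardOutput = true,
        UseShellExecute = false
    };
    using (var proc = Process.Start(start))
    {
        string output = proc.StandardOutput.ReadToEnd();
        proc.WaitForExit();
        // Assumed output format: a line such as "IsRunning: True".
        return output.IndexOf("IsRunning: True", StringComparison.OrdinalIgnoreCase) >= 0;
    }
}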
See my answer here. It uses a neat little NuGet package to assist with starting/stopping the Azure Storage Emulator programmatically: RimDev.Automation.StorageEmulator.