What is the right way to refresh the authorization token while a large file is being uploaded to Google Drive?
For example: the current authorization token expires at 06:00 AM and the file upload starts at 05:15 AM, so at 06:00 AM the application gets an exception because the authorization token is no longer valid.
I've tried to work around the issue with the source code below, but it does not work.
/// <summary>
/// Uploads a file with a specified path.
/// </summary>
/// <param name="startFolder">The start folder.</param>
/// <param name="path">The path of destination file.</param>
/// <param name="localFile">The local file to upload.</param>
/// <returns>The uploaded file.</returns>
private File GdUploadFile(File startFolder, string path, FileInfo localFile)
{
if (startFolder == null)
{
throw new ArgumentNullException("startFolder");
}
if (localFile == null)
{
throw new ArgumentNullException("localFile");
}
if (!localFile.Exists)
{
throw new FileNotFoundException("File not found", localFile.FullName);
}
var config = GetConfiguration();
if (config.TraceLog)
{
Destination.Logger.LogDebug(string.Format("{0} \tUpload file \"{1}\" to \"{2}\"", Destination.Name, localFile.FullName, path));
}
string pathToFile = string.IsNullOrEmpty(path) ? localFile.Name : path;
string remotePath = ExtractFilePath(pathToFile);
var fileFolder = GdCreateFolderByPath(startFolder, remotePath);
var fileName = ExtractFileName(pathToFile);
DriveService service = GetDriveService();
var body = new File
{
Title = fileName,
Description = "My File",
MimeType = BackupFileMimeType,
Kind = DriveFileKindType,
OriginalFilename = fileName,
FileExtension = localFile.Extension,
Parents = new List<ParentReference>
{
new ParentReference
{
ETag = fileFolder.ETag,
Id = fileFolder.Id,
Kind = fileFolder.Kind
}
}
};
FilesResource.InsertMediaUpload request;
var source = new MediaFileSource(localFile.FullName, BackupFileMimeType);
using (var fileStream = source.GetDataStream())
{
if (config.TraceLog)
{
Destination.Logger.LogDebug(string.Format("{0} \tUploading \"{1}\"...", Destination.Name, localFile.FullName));
}
request = service.Files.Insert(body, fileStream, body.MimeType);
if (config.TraceLog)
{
int postedPercent = 0;
request.ProgressChanged += p =>
{
var currentPercent = (int) (p.BytesSent/(double) source.ContentLength*100);
if (currentPercent != postedPercent)
{
string msg = string.Format("{0} \tPosted {1}% ({2} bytes)", Destination.Name,
currentPercent, p.BytesSent);
Destination.Logger.LogDebug(msg);
postedPercent = currentPercent;
}
};
}
var connection = Destination.Connection as GoogleDriveDestinationConnection;
Debug.Assert(connection != null, "connection != null");
request.ProgressChanged += p =>
{
bool refreshAuth = connection.ForceRefreshAuthorization();
var auth =
request.Authenticator as
Google.Apis.Authentication.OAuth2.OAuth2Authenticator<NativeApplicationClient>;
if (auth != null && auth.State != null && refreshAuth)
{
var state = connection.AuthorizationState;
auth.State.AccessToken = state.AccessToken;
auth.State.AccessTokenExpirationUtc = state.AccessTokenExpirationUtc;
auth.State.AccessTokenIssueDateUtc = state.AccessTokenIssueDateUtc;
auth.State.Callback = state.Callback;
auth.State.RefreshToken = state.RefreshToken;
auth.State.SaveChanges();
if (config.TraceLog)
{
Destination.Logger.LogDebug("Authorization state for the upload request is updated");
}
}
};
request.ChunkSize = ChunkSize;
request.Upload();
if (config.TraceLog)
{
Destination.Logger.LogDebug(string.Format("{0} \t\"{1}\" uploaded", Destination.Name, localFile.FullName));
}
}
return request.ResponseBody;
}
Consider doing a resumable upload (https://developers.google.com/drive/manage-uploads#resumable): refresh the token when needed and continue the upload where you left off.
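A rough sketch of what that can look like with raw REST calls against the v2 resumable endpoint (refreshAccessTokenAsync is a hypothetical helper that returns a currently valid access token, for example via your existing OAuth2 refresh-token flow):
// Sketch only: start a resumable session, then upload chunk by chunk,
// refreshing the access token before each chunk if necessary.
private async Task UploadResumableAsync(string filePath, string accessToken, Func<Task<string>> refreshAccessTokenAsync)
{
    const int ChunkSize = 4 * 256 * 1024; // chunks should be multiples of 256 KB
    using (var http = new HttpClient())
    using (var file = System.IO.File.OpenRead(filePath))
    {
        // 1. Start the resumable session; the response's Location header is the upload URL.
        var start = new HttpRequestMessage(HttpMethod.Post,
            "https://www.googleapis.com/upload/drive/v2/files?uploadType=resumable");
        start.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
        start.Content = new StringContent("{\"title\": \"" + Path.GetFileName(filePath) + "\"}",
            Encoding.UTF8, "application/json");
        var startResponse = await http.SendAsync(start);
        startResponse.EnsureSuccessStatusCode();
        Uri sessionUri = startResponse.Headers.Location;
        // 2. Upload the file in chunks, refreshing the token between chunks.
        long position = 0;
        var buffer = new byte[ChunkSize];
        int read;
        while ((read = await file.ReadAsync(buffer, 0, buffer.Length)) > 0)
        {
            accessToken = await refreshAccessTokenAsync(); // hypothetical: refreshes only when close to expiry
            var put = new HttpRequestMessage(HttpMethod.Put, sessionUri)
            {
                Content = new ByteArrayContent(buffer, 0, read)
            };
            put.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken);
            put.Content.Headers.ContentRange =
                new ContentRangeHeaderValue(position, position + read - 1, file.Length);
            var response = await http.SendAsync(put);
            // Intermediate chunks return 308 (Resume Incomplete); the final chunk returns 200/201.
            if ((int)response.StatusCode != 308)
                response.EnsureSuccessStatusCode();
            position += read;
        }
    }
}
If the upload is interrupted or a chunk fails with an expired token, you can ask the session URI which bytes it already has (a PUT with "Content-Range: bytes */total") and continue from the returned range with a fresh token.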
I was not able to find a satisfactory solution to the problem of the invalid authorization token.
So I created my own open source lightweight library for working with files on Google Drive. The library invokes all REST functions directly and has full control over the upload and download processes; it resolves the problem by refreshing the authorization token every hour. The library is currently used in thousands of installations of my company's SQL Server backup product, it is very stable, and it has successfully resolved this problem. You can get the source code and examples here: https://github.com/AlexeyVlg/Pranas.Net.Clouds
I am trying to attach large files to a ToDoTask with the Graph API, following the example in the docs for attaching large files to a ToDoTask and using the recommended LargeFileUploadTask class for uploading large files.
I have done this successfully before when attaching large files to emails and sending them, so I used that as the base for the following method.
public async Task CreateTaskBigAttachments( string idList, string title, List<string> categories,
BodyType contentType, string content, Importance importance, bool isRemindOn, DateTime? dueTime, cAttachment[] attachments = null)
{
try
{
var _newTask = new TodoTask
{
Title = title,
Categories = categories,
Body = new ItemBody()
{
ContentType = contentType,
Content = content,
},
IsReminderOn = isRemindOn,
Importance = importance
};
if (dueTime.HasValue)
{
var _timeZone = TimeZoneInfo.Local;
_newTask.DueDateTime = DateTimeTimeZone.FromDateTime(dueTime.Value, _timeZone.StandardName);
}
var _task = await _graphServiceClient.Me.Todo.Lists[idList].Tasks.Request().AddAsync(_newTask);
//Add attachments
if (attachments != null)
{
if (attachments.Length > 0)
{
foreach (var _attachment in attachments)
{
var _attachmentContentSize = _attachment.ContentBytes.Length;
var _attachmentInfo = new AttachmentInfo
{
AttachmentType = AttachmentType.File,
Name = _attachment.FileName,
Size = _attachmentContentSize,
ContentType = _attachment.ContentType
};
var _uploadSession = await _graphServiceClient.Me
.Todo.Lists[idList].Tasks[_task.Id]
.Attachments.CreateUploadSession(_attachmentInfo).Request().PostAsync();
using (var _stream = new MemoryStream(_attachment.ContentBytes))
{
_stream.Position = 0;
LargeFileUploadTask<TaskFileAttachment> _largeFileUploadTask = new LargeFileUploadTask<TaskFileAttachment>(_uploadSession, _stream, MaxChunkSize);
try
{
await _largeFileUploadTask.UploadAsync();
}
catch (ServiceException errorGraph)
{
if (errorGraph.StatusCode == HttpStatusCode.InternalServerError || errorGraph.StatusCode == HttpStatusCode.BadGateway
|| errorGraph.StatusCode == HttpStatusCode.ServiceUnavailable || errorGraph.StatusCode == HttpStatusCode.GatewayTimeout)
{
await Task.Delay(1000); // Wait before the next attempt
//Try again
await _largeFileUploadTask.ResumeAsync();
}
else
throw; // rethrow without losing the original stack trace
}
}
}
}
}
}
catch (ServiceException)
{
// Rethrow without swallowing the stack trace.
throw;
}
catch (Exception)
{
throw;
}
}
Up to the point of creating the task everything goes well: the task is created for the user and is properly shown in the user's task list. The upload session is also created properly.
The problem comes when I try to upload the large file in the UploadAsync instruction.
The following error happens:
Code: InvalidAuthenticationToken Message: Access token is empty.
But according to the LargeFileUploadTask doc, the client does not need to set Auth headers.
param name="baseClient" To use for making upload requests. The client should not set Auth headers as upload urls do not need them.
Is LargeFileUploadTask not supposed to be used for uploading large files to a ToDoTask?
If not, what is the proper way to upload large files to a ToDoTask using the Graph API? Can someone provide an example?
If you want, you can raise an issue with the details here so the team can have a look: https://github.com/microsoftgraph/msgraph-sdk-dotnet-core/issues.
It seems like it's a bug and they are working on it.
As a temporary workaround, I wrote the following code to deal with the large files.
var _task = await _graphServiceClient.Me.Todo.Lists[idList].Tasks.Request().AddAsync(_newTask);
//Add attachments
if (attachments != null)
{
if (attachments.Length > 0)
{
foreach (var _attachment in attachments)
{
var _attachmentContentSize = _attachment.ContentBytes.Length;
var _attachmentInfo = new AttachmentInfo
{
AttachmentType = AttachmentType.File,
Name = _attachment.FileName,
Size = _attachmentContentSize,
ContentType = _attachment.ContentType
};
var _uploadSession = await _graphServiceClient.Me
.Todo.Lists[idList].Tasks[_task.Id]
.Attachments.CreateUploadSession(_attachmentInfo).Request().PostAsync();
// Get the upload URL and the next expected range from the response
string _uploadUrl = _uploadSession.UploadUrl;
using (var _stream = new MemoryStream(_attachment.ContentBytes))
{
_stream.Position = 0;
// Create a byte array to hold the contents of each chunk
byte[] _chunk = new byte[MaxChunkSize];
//Bytes to read
int _bytesRead = 0;
//Times the stream has been read
var _ind = 0;
while ((_bytesRead = _stream.Read(_chunk, 0, _chunk.Length)) > 0)
{
// Calculate the range of the current chunk
string _currentChunkRange = $"bytes {_ind * MaxChunkSize}-{_ind * MaxChunkSize + _bytesRead - 1}/{_stream.Length}";
// We should then calculate the next expected range here, if we need it
// Create a ByteArrayContent object from the chunk
ByteArrayContent _byteArrayContent = new ByteArrayContent(_chunk, 0, _bytesRead);
// Set the header for the current chunk
_byteArrayContent.Headers.Add("Content-Range", _currentChunkRange);
_byteArrayContent.Headers.Add("Content-Type", _attachment.ContentType);
_byteArrayContent.Headers.Add("Content-Length", _bytesRead.ToString());
// Upload the chunk using the httpClient Request
var _client = new HttpClient();
var _requestMessage = new HttpRequestMessage()
{
RequestUri = new Uri(_uploadUrl + "/content"),
Method = HttpMethod.Put,
Headers =
{
{ "Authorization", bearerToken },
}
};
_requestMessage.Content = _byteArrayContent;
var _response = await _client.SendAsync(_requestMessage);
if (!_response.IsSuccessStatusCode)
throw new Exception("File attachment failed");
_ind++;
}
}
}
}
}
I'm trying to create a program that will download all my OneNote files from OneDrive. But when I try to authenticate using msaAuthenticationProvider a white window appears and then nothing happens. I think the window is supposed to be the Microsoft login, but nothing appears in it.
Here's my code:
string[] scopes = new string[] {
"onedrive.readonly",
"wl.signin"
};
var msaAuthenticationProvider = new MsaAuthenticationProvider(
clientId,
returnURL,
scopes);
await msaAuthenticationProvider.AuthenticateUserAsync();
var client = new OneDriveClient(URL, msaAuthenticationProvider);
It gets to the AuthenticateUserAsync method, then the window appears, and after that nothing happens.
I'm also not sure what the returnURL is supposed to be, because all the examples were either for an app version or just said "return URL" without giving any examples.
Sorry for the delay. Have you tried this method:
msaAuthenticationProvider.RestoreMostRecentFromCacheOrAuthenticateUserAsync();
Edit: If the last known connection token is usable, this method authenticates the user without prompting them. It restores the last authentication cache if it can, or prompts the user for their login and password otherwise, and it can replace the AuthenticateUserAsync call you are already using. I had the same issue and this method solved it.
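Applied to the code from the question, that looks roughly like this (same clientId, returnURL and scopes as above):
var msaAuthenticationProvider = new MsaAuthenticationProvider(
    clientId,
    returnURL,
    scopes);
// Reuses the cached session when it is still valid; only prompts the user when it has to.
await msaAuthenticationProvider.RestoreMostRecentFromCacheOrAuthenticateUserAsync();
var client = new OneDriveClient(URL, msaAuthenticationProvider);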
Edit 2: The OneDrive SDK documentation is very poor. I eventually worked this out myself: you can get the connection token (to save it, for example) and inject it when you need it, like this, in an async task:
if (_OneDriveCacheBlob == null)
{
bool needtosaveblob = true;
_OneDriveCacheBlob = null;
CredentialCache cc = new CredentialCache();
_OneDriveCacheBlob = GetUser(CurrentUserName).OneDriveAuthProviderBlob;
if (_OneDriveCacheBlob != null)
{
cc.InitializeCacheFromBlob(_OneDriveCacheBlob);
needtosaveblob = false;
}
MsaAuthenticationProvider msaAuthProvider = new MsaAuthenticationProvider(OneDriveClass.clientId, OneDriveClass.returnUrl, scopes, cc);
int timeout = 15;
_ = Task.Run(() => WaitForODConnection(msaAuthProvider));
while (!WaitForODConnectionExecuted)
{
if (timeout <= 0)
break;
await Task.Delay(TimeSpan.FromSeconds(1));
timeout -= 1;
}
WaitForODConnectionExecuted = false;
if (timeout <= 0)
{
// Request for reconnection to OneDrive because of invalid Blob
await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.High, () =>
{
//This method requests a new login by a simple msaAuthProvider.AuthenticateUserAsync() call from a new instance of MsaAuthenticationProvider and a new instance of CredentialCache.
//ChangeOneDriveAccount();
});
}
else
{
_OneDriveClient = new OneDriveClient(OneDriveClass.basUrl, msaAuthProvider);
}
string accessToken = msaAuthProvider.CurrentAccountSession.AccessToken;
JObject json = await GetUserInfos(msaAuthProvider.CurrentAccountSession.AccessToken);
if (json != null)
{
// If you need
oneDriveUserName = json["name"].ToString();
oneDriveEmail = json["emails"]["account"].ToString();
}
else
{
//Unable to get OneDrive user informations;
}
if (needtosaveblob)
{
_OneDriveCacheBlob = cc.GetCacheBlob();
//You can save _OneDriveCacheBlob to reuse it later;
}
}
To get the user info:
/// <summary>
/// Return User informations as a JObject. To get username and email, if return isn't null :
/// username = json["name"].ToString();
/// email = json["emails"]["account"].ToString();
/// </summary>
/// <param name="accessToken">accesstoken of Onedrive account</param>
/// <returns>JObject value</returns>
public static async Task<JObject> GetUserInfos(string accessToken)
{
JObject json = null;
Uri uri = new Uri($"https://apis.live.net/v5.0/me?access_token={accessToken}");
System.Net.Http.HttpClient httpClient = new System.Net.Http.HttpClient();
System.Net.Http.HttpResponseMessage result = await httpClient.GetAsync(uri);
// user info returned as JSON
string jsonUserInfo = await result.Content.ReadAsStringAsync();
if (jsonUserInfo != null)
{
json = JObject.Parse(jsonUserInfo);
//username = json["name"].ToString();
//email = json["emails"]["account"].ToString();
}
return json;
}
And because the OneDrive call never returns (it just hangs) when the token is no longer usable, I wrap it in a helper so I can wait for it with a timeout:
bool WaitForODConnectionExecuted = false;
private async Task WaitForODConnection(MsaAuthenticationProvider msaAuthProvider)
{
await msaAuthProvider.RestoreMostRecentFromCacheOrAuthenticateUserAsync();
WaitForODConnectionExecuted = true;
}
It was not fun, and I don't think my code is clean, so don't use it as-is without reworking it a little.
I need to download a JPG file from the FileCabinet in NetSuite. I know the file name, so I searched for the file and assigned it to a File object. I got the object right, but its content is NULL. I am providing some code here. Can anybody point out the error or any missing step? Thank you.
var result = _service.search(flSearch);
if (result.totalRecords > 0)
{
recordList = result.recordList;
Record[] records = new Record[recordList.Length];
for (int j = 0; j < recordList.Length; j++)
{
if (recordList[j] is File)
{
File itemImage = (File)(recordList[j]);
byte[] data;
data = new Byte[(int)itemImage.fileSize];
data = itemImage.content; //Here getting NULL value
FileStream inFile;
using (inFile = new FileStream("newImage.jpg", FileMode.Create, FileAccess.Write))
{
inFile.Write(data, 0, data.Length);
}
}
}
}
itemImage.content is just a base64 string.
Take that string, base64-decode it, and save the result to your local file.
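A minimal sketch of that, assuming the content really does arrive as a base64 string rather than raw bytes:
// Sketch only: base64-decode the file content and write it to disk.
string base64Content = itemImage.content; // assumption: content is a base64-encoded string here
byte[] imageBytes = Convert.FromBase64String(base64Content);
System.IO.File.WriteAllBytes("newImage.jpg", imageBytes);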
If the search is based on the internal id of the file you want to search, then the following code may help
var service = LoginNetSuite();
Tuple<string, string> fileContent = null;
FileSearch fileSearch = new FileSearch();
FileSearchBasic fileSearchBasic = new FileSearchBasic();
// Specify the folder in which the search is to be done.
SearchMultiSelectField folderFilter = new SearchMultiSelectField();
folderFilter.@operator = SearchMultiSelectFieldOperator.anyOf;
folderFilter.operatorSpecified = true;
RecordRef[] folder = new RecordRef[1];
folder[0] = new RecordRef();
folder[0].internalId = "78990"; // 78990 => Internal id of the folder.
folderFilter.searchValue = folder;
fileSearchBasic.folder = folderFilter;
// Specify the file internal id.
SearchMultiSelectField fileFilter = new SearchMultiSelectField();
fileFilter.@operator = SearchMultiSelectFieldOperator.anyOf;
fileFilter.operatorSpecified = true;
RecordRef[] rec = new RecordRef[1];
rec[0] = new RecordRef();
rec[0].internalId = "345656"; // 345656 => Internal id of the file.
fileFilter.searchValue = rec;
fileSearchBasic.internalId = fileFilter;
fileSearch.basic = fileSearchBasic;
var result = service.search(fileSearch);
var recordList = (Record[])result.recordList;
if (recordList != null && recordList.Length != 0)
{
var file = (File)result.recordList.First();
fileContent = new Tuple<string, string>(file.url, file.name);
}
In this code the folder internal id and the file internal id are given as the search parameters, so the file search is done in the specified file cabinet folder with the specified file id.
The response from NetSuite will contain the internal id, file name, URL, folder name, etc. The file can then be downloaded from the URL location.
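Downloading from that URL is then an ordinary HTTP GET, roughly like this (a sketch; depending on your account the url returned by NetSuite may be relative to your account domain and may require an authenticated session):
// Sketch: fetch the file bytes from the URL in the search result and save them locally.
// fileContent.Item1 is file.url and fileContent.Item2 is file.name from the code above.
using (var http = new HttpClient())
{
    byte[] bytes = await http.GetByteArrayAsync(fileContent.Item1);
    System.IO.File.WriteAllBytes(fileContent.Item2, bytes);
}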
Here's what's going on. I have an ASP.NET MVC 4 Web API web application. I can call API resources via URL. One of these functions gets performance monitoring data for a specified amount of time and returns it in JSON once it has completed. However, what I want to do is return the data to the browser in real time while it is being gathered.
It is IMPORTANT to note that I am working with the browser and API resources in the model, not with a View. Please don't casually tell me to use JavaScript in a View, because there is no view, and please don't just point me to the SignalR wiki, because the information in the ".NET" sections is meant for desktop applications, not web apps. For example, you can't Console.WriteLine() to a browser.
To reiterate, I am using ASP.NET MVC 4 Web API to develop an API, and I am calling the API via URL in the browser, which returns JSON. I am attempting to use SignalR to have the app push JSON to the browser, but it is not doing anything at all. The application simply returns the completed JSON from the controller action, with all of the performance data values, once the process has finished. In other words, SignalR is not working.
So what I'm trying to do is this: while the API resource is gathering all the information, SignalR sends JSON to the browser every second so that the client can see what's going on in real time.
What I need to find out is why SignalR isn't sending it, and how I can send information to be displayed in the browser without JavaScript, since I'm working from a model class, not from a view.
As you can see, I subscribe to the event using On, and then use Invoke to call the server-side hub method SendToClient.
Please let me know if what I'm trying to do is impossible. I have never heard of a "real-time", dynamic API call via URL.
Here is my hub class. It is located in ~/signalr/hubs, in a file called LiveHub.cs. Its SendToClient method is what I am trying to invoke in the method shown in the next code block.
namespace PerfMon2.signalr.hubs
{
public class LiveHub : Hub
{
public void SendToClient(List<DataValueInfo> json)
{
Clients.showValue(json);
}
}
}
Here is the method from LogDBRepository.cs that includes the SignalR calls.
public List<LogInfo> LogTimedPerfData(string macName, string categoryName, string counterName,
string instanceName, string logName, string live, long? seconds)
{
iModsDBRepository modsDB = new iModsDBRepository();
List<MachineInfo> theMac = modsDB.GetMachineByName(macName);
if (theMac.Count == 0)
return new List<LogInfo>();
else if (instanceName == null)
{
if (!PerformanceCounterCategory.Exists(categoryName, macName) ||
!PerformanceCounterCategory.CounterExists(counterName, categoryName, macName) )
{
return new List<LogInfo>();
}
}
else if (instanceName != null)
{
if (!PerformanceCounterCategory.Exists(categoryName, macName) ||
!PerformanceCounterCategory.CounterExists(counterName, categoryName, macName) ||
!PerformanceCounterCategory.InstanceExists(instanceName, categoryName, macName))
{
return new List<LogInfo>();
}
}
else if (logName == null)
{
return new List<LogInfo>();
}
// Check if entered log name is a duplicate for the authenticated user
List<LogInfo> checkDuplicateLog = this.GetSingleLog(logName);
if (checkDuplicateLog.Count > 0)
{
return new List<LogInfo>();
}
PerformanceCounterCategory category = new PerformanceCounterCategory(categoryName, theMac[0].MachineName);
if (category.CategoryName == null || category.MachineName == null)
{
return new List<LogInfo>();
}
List<LogInfo> logIt = new List<LogInfo>();
if (category.CategoryType != PerformanceCounterCategoryType.SingleInstance)
{
List<InstanceInfo> instances = modsDB.GetInstancesFromCatMacName(theMac[0].MachineName, category.CategoryName);
foreach (InstanceInfo inst in instances)
{
if (!category.InstanceExists(inst.InstanceName))
{
continue;
}
else if (inst.InstanceName.Equals(instanceName, StringComparison.OrdinalIgnoreCase))
{
PerformanceCounter perfCounter = new PerformanceCounter(categoryName, counterName,
inst.InstanceName, theMac[0].MachineName);
//CounterSample data = perfCounter.NextSample();
//double value = CounterSample.Calculate(data, perfCounter.NextSample());
string data = "";
List<UserInfo> currUser = this.GetUserByName(WindowsIdentity.GetCurrent().Name);
string timeStarted = DateTime.Now.ToString("MM/dd/yyyy - h:mm:ss tt");
//string[] dataValues = new string[(int)seconds];
List<string> dataValues = new List<string>();
var hubConnection = new HubConnection("http://localhost/PerfMon2/");
hubConnection.Credentials = CredentialCache.DefaultNetworkCredentials;
var perfMon = hubConnection.CreateProxy("LiveHub");
// perfMon.On("sendValue", message => Console.WriteLine(message));
perfMon.On("showValue", json => Console.WriteLine(json));
hubConnection.Start().Wait();
List<DataValueInfo> lol = new List<DataValueInfo>();
for (int i = 0; i < seconds; i++)
{
data = "Value " + i + ": " + perfCounter.NextValue().ToString();
//dataValues[i] = data;
dataValues.Add(data);
lol.Add(new DataValueInfo
{
Value = perfCounter.NextValue().ToString()
});
// perfMon.Invoke<List<DataValueInfo>>("Send", lol);
perfMon.Invoke("SendToClient", lol);
Thread.Sleep(1000);
}
string timeFinished = DateTime.Now.ToString("MM/dd/yyyy - h:mm:ss tt");
Log log = new Log
{
LogName = logName,
CounterName = perfCounter.CounterName,
InstanceName = perfCounter.InstanceName,
CategoryName = perfCounter.CategoryName,
MachineName = perfCounter.MachineName,
TimeStarted = timeStarted,
TimeFinished = timeFinished,
PerformanceData = string.Join(",", dataValues),
UserID = currUser[0].UserID
};
this.CreateLog(log);
logIt.Add(new LogInfo
{
LogName = logName,
CounterName = perfCounter.CounterName,
InstanceName = perfCounter.InstanceName,
CategoryName = perfCounter.CategoryName,
MachineName = perfCounter.MachineName,
TimeStarted = timeStarted,
TimeFinished = timeFinished,
PerformanceData = dataValues.ToList<string>()
});
break;
}
}
}
else
{
PerformanceCounter perfCounter = new PerformanceCounter(categoryName, counterName,
"", theMac[0].MachineName);
string data = "";
List<UserInfo> currUser = this.GetUserByName(WindowsIdentity.GetCurrent().Name);
string timeStarted = DateTime.Now.ToString("MM/dd/yyyy - h:mm:ss tt");
//string[] dataValues = new string[(int)seconds];
List<string> dataValues = new List<string>();
var hubConnection = new HubConnection("http://localhost/PerfMon2/");
hubConnection.Credentials = CredentialCache.DefaultNetworkCredentials;
var perfMon = hubConnection.CreateProxy("LiveHub");
// perfMon.On("sendValue", message => Console.WriteLine(message));
perfMon.On("showValue", json => Console.WriteLine(json));
hubConnection.Start().Wait();
List<DataValueInfo> lol = new List<DataValueInfo>();
for (int i = 0; i < seconds; i++)
{
data = "Value " + i + ": " + perfCounter.NextValue().ToString();
//dataValues[i] = data;
dataValues.Add(data);
lol.Add(new DataValueInfo
{
Value = perfCounter.NextValue().ToString()
});
// perfMon.Invoke<List<DataValueInfo>>("Send", lol);
perfMon.Invoke("SendToClient", lol);
Thread.Sleep(1000);
}
string timeFinished = DateTime.Now.ToString("MM/dd/yyyy - h:mm:ss tt");
Log log = new Log
{
LogName = logName,
CounterName = perfCounter.CounterName,
InstanceName = perfCounter.InstanceName,
CategoryName = perfCounter.CategoryName,
MachineName = perfCounter.MachineName,
TimeStarted = timeStarted,
TimeFinished = timeFinished,
PerformanceData = string.Join(",", dataValues),
UserID = currUser[0].UserID
};
this.CreateLog(log);
logIt.Add(new LogInfo
{
LogName = logName,
CounterName = perfCounter.CounterName,
InstanceName = perfCounter.InstanceName,
CategoryName = perfCounter.CategoryName,
MachineName = perfCounter.MachineName,
TimeStarted = timeStarted,
TimeFinished = timeFinished,
PerformanceData = dataValues.ToList<string>()
});
}
return logIt;
}
Here is the controller for the method in LogController.cs :
[AcceptVerbs("GET", "POST")]
public List<LogInfo> Log_Perf_Data(string machine_name, string category_name, string counter_name, string instance_name,
string log_name, long? seconds, string live, string enforceQuery)
{
LogController.CheckUser();
// POST api/log/post_data?machine_name=&category_name=&counter_name=&instance_name=&log_name=&seconds=
if (machine_name != null && category_name != null && counter_name != null && log_name != null && seconds.HasValue && enforceQuery == null)
{
List<LogInfo> dataVal = logDB.LogTimedPerfData(machine_name, category_name, counter_name, instance_name,
log_name, live, seconds);
logDB.SaveChanges();
return dataVal;
}
return new List<LogInfo>();
}
Maybe you can implement it with a push technique. Here is how I do it:
Class with message
public class Message
{
/// <summary>
/// The name who will receive this message.
/// </summary>
public string RecipientName { get; set; }
/// <summary>
/// The message content.
/// </summary>
public string MessageContent { get; set; }
}
Class that will represent client:
public class Client
{
private ManualResetEvent messageEvent = new ManualResetEvent(false);
private Queue<Message> messageQueue = new Queue<Message>();
/// <summary>
/// This method is called by a sender to send a message to this client.
/// </summary>
/// <param name="message">the new message</param>
public void EnqueueMessage(Message message)
{
lock (messageQueue)
{
messageQueue.Enqueue(message);
// Set a new message event.
messageEvent.Set();
}
}
/// <summary>
/// This method is called by the client to receive messages from the message queue.
/// If no message, it will wait until a new message is inserted.
/// </summary>
/// <returns>the unread message</returns>
public Message DequeueMessage()
{
// Wait until a new message.
messageEvent.WaitOne();
lock (messageQueue)
{
if (messageQueue.Count == 1)
{
messageEvent.Reset();
}
return messageQueue.Dequeue();
}
}
}
Class to send messages to clients:
public class ClientAdapter
{
/// <summary>
/// The recipient list.
/// </summary>
private Dictionary<string, Client> recipients = new Dictionary<string,Client>();
/// <summary>
/// Send a message to a particular recipient.
/// </summary>
public void SendMessage(Message message)
{
if (recipients.ContainsKey(message.RecipientName))
{
Client client = recipients[message.RecipientName];
client.EnqueueMessage(message);
}
}
/// <summary>
/// Called by a individual recipient to wait and receive a message.
/// </summary>
/// <returns>The message content</returns>
public string GetMessage(string userName)
{
string messageContent = string.Empty;
if (recipients.ContainsKey(userName))
{
Client client = recipients[userName];
messageContent = client.DequeueMessage().MessageContent;
}
return messageContent;
}
/// <summary>
/// Join a user to the recipient list.
/// </summary>
public void Join(string userName)
{
recipients[userName] = new Client();
}
/// <summary>
/// Singleton pattern.
/// This pattern will ensure there is only one instance of this class in the system.
/// </summary>
public static ClientAdapter Instance = new ClientAdapter();
private ClientAdapter() { }
}
Sending messages:
Message message = new Message
{
RecipientName = tbRecipientName.Text.Trim(),
MessageContent = tbMessageContent.Text.Trim()
};
if (!string.IsNullOrWhiteSpace(message.RecipientName) && !string.IsNullOrEmpty(message.MessageContent))
{
// Call the client adapter to send the message to the particular recipient instantly.
ClientAdapter.Instance.SendMessage(message);
}
Receive messages (these are JavaScript functions written in a test page; they render the message content on the ASPX page, and this is where you should implement your own logic):
// This method will persist a http request and wait for messages.
function waitEvent() {
CSASPNETReverseAJAX.Dispatcher.WaitMessage("<%= Session["userName"] %>",
function (result) {
displayMessage(result);
// Keep looping.
setTimeout(waitEvent, 0);
}, function () {
// Keep looping.
setTimeout(waitEvent, 0);
});
}
// Append a message content to the result panel.
function displayMessage(message) {
var panel = document.getElementById("<%= lbMessages.ClientID %>");
panel.innerHTML += currentTime() + ": " + message + "<br />";
}
// Return a current time string.
function currentTime() {
var currentDate = new Date();
return currentDate.getHours() + ":" + currentDate.getMinutes() + ":" + currentDate.getSeconds();
}
I'm writing a program to allow a user to upload files to their Google Drive account. I have the upload part working and am using OAuth2. The issue I'm currently having is getting a list of folders from the user's Drive account.
I found some code that is supposed to do this using the .setUserCredentials method, but it doesn't work:
DocumentsService service1 = new DocumentsService("project");
service1.setUserCredentials("user","pass");
FolderQuery query1 = new FolderQuery();
// Make a request to the API and get all documents.
DocumentsFeed feed = service1.Query(query1);
// Iterate through all of the documents returned
foreach (DocumentEntry entry in feed.Entries)
{
var blech = entry.Title.Text;
}
Nothing is returned. Ideally, I want to use OAuth2 to do this. I've been trying with the following code, setting the authentication token, but I always get access denied:
String CLIENT_ID = "clientid";
String CLIENT_SECRET = "secretid";
var docprovider = new NativeApplicationClient(GoogleAuthenticationServer.Description, CLIENT_ID, CLIENT_SECRET);
var docstate = GetDocAuthentication(docprovider);
DocumentsService service1 = new DocumentsService("project");
service1.SetAuthenticationToken(docstate.RefreshToken);
FolderQuery query1 = new FolderQuery();
DocumentsFeed feed = service1.Query(query1); //get error here
// Iterate through all of the documents returned
foreach (DocumentEntry entry in feed.Entries)
{
// Print the title of this document to the screen
var blech = entry.Title.Text;
}
..
private static IAuthorizationState GetDocAuthentication(NativeApplicationClient client)
{
const string STORAGE = "storagestring";
const string KEY = "keystring";
string scope = "https://docs.google.com/feeds/default/private/full/-/folder";
// Check if there is a cached refresh token available.
IAuthorizationState state = AuthorizationMgr.GetCachedRefreshToken(STORAGE, KEY);
if (state != null)
{
try
{
client.RefreshToken(state);
return state; // Yes - we are done.
}
catch (DotNetOpenAuth.Messaging.ProtocolException ex)
{
}
}
// Retrieve the authorization from the user.
state = AuthorizationMgr.RequestNativeAuthorization(client, scope);
AuthorizationMgr.SetCachedRefreshToken(STORAGE, KEY, state);
return state;
}
Specifically, I get "Execution of request failed: https://docs.google.com/feeds/default/private/full/-/folder - The remote server returned an error: (401) Unauthorized".
I've also tried:
var docauth = new OAuth2Authenticator<NativeApplicationClient>(docprovider, GetDocAuthentication);
DocumentsService service1 = new DocumentsService("project");
service1.SetAuthenticationToken(docauth.State.AccessToken);
but "State" is always null, so I get a null object error. What am I doing wrong and how is this done?
You should use the Drive SDK, not the Documents List API; the Drive SDK lets you list folders. You can use "root" as the folderId if you want to list the root directory.
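With the Drive .NET client that could look roughly like this (a sketch; _service is assumed to be an authorized DriveService, and with the older v2 client the result collection is Items with a Title property instead of Files/Name):
// Sketch: list only the folders that are direct children of the root folder.
var request = _service.Files.List();
request.Q = "'root' in parents and mimeType = 'application/vnd.google-apps.folder' and trashed = false";
var result = await request.ExecuteAsync();
foreach (var folder in result.Files)
{
    Console.WriteLine("{0} ({1})", folder.Name, folder.Id);
}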
I actually implemented the v3 version of the Google Drive SDK for .NET and needed to search for folders as well.
I prefer requesting only folders instead of getting all files and then running a LINQ query to keep just the folders.
This is my implementation:
private async Task<bool> FolderExistsAsync(string folderName)
{
var response = await GetAllFoldersAsync();
return response.Files
.Where(x => x.Name.ToLower() == folderName.ToLower())
.Any();
}
private async Task<Google.Apis.Drive.v3.Data.FileList> GetAllFoldersAsync()
{
var request = _service.Files.List();
request.Q = "mimeType = 'application/vnd.google-apps.folder'";
var response = await request.ExecuteAsync();
return response;
}
You could also request the name in the Q this way:
request.Q = $"mimeType = 'application/vnd.google-apps.folder' and name = '{folderName}'";
Which would simplify things to (null checking omitted):
private async Task<bool> FolderExistsAsync(string folderName)
{
var response = await GetDesiredFolder(folderName);
return response.Files.Any();
}
private async Task<FileList> GetDesiredFolder(string folderName)
{
var request = _service.Files.List();
request.Q = $"mimeType = 'application/vnd.google-apps.folder' and name = '{folderName}'";
var response = await request.ExecuteAsync();
return response;
}
private IEnumerable<DocumentEntry> GetFolders(string id) {
if (IsLogged) {
var query = new FolderQuery(id)
{
ShowFolders = true
};
var feed = GoogleDocumentsService.Query(query);
return feed.Entries.Cast<DocumentEntry>().Where(x => x.IsFolder).OrderBy(x => x.Title.Text);
}
return null;
}
...
var rootFolders = GetFolders("root");
if (rootFolders != null){
foreach(var folder in rootFolders){
var subFolders = GetFolders(folder.ResourceId);
...
}
}
where GoogleDocumentsService is an instance of DocumentsService and IsLogged is a flag indicating a successful login.
Here is how I got the list of folders from Google Drive:
FilesResource.ListRequest filelist = service.Files.List();
var folders = filelist.Execute().Items.Where(x => x.MimeType == "application/vnd.google-apps.folder").ToList();
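Note that Files.List only returns one page of results at a time, so with many files you need to follow NextPageToken. A sketch against the same v2-style service object:
// Sketch: page through all results and keep only the folders.
var folders = new List<Google.Apis.Drive.v2.Data.File>();
FilesResource.ListRequest listRequest = service.Files.List();
do
{
    var page = listRequest.Execute();
    folders.AddRange(page.Items.Where(x => x.MimeType == "application/vnd.google-apps.folder"));
    listRequest.PageToken = page.NextPageToken;
} while (!string.IsNullOrEmpty(listRequest.PageToken));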