I am uploading a 10 GB file to my FTP server using C# FtpWebRequest. How can the rate of transfer be calculated?
The following is the code:
FileStream fs = null;
Stream rs = null;
try
{
    string uploadFileName = new FileInfo(file).Name;
    string uploadUrl = ftpServer;
    Console.WriteLine("Start Time: {0}", DateTime.Now);
    fs = new FileStream(file, FileMode.Open, FileAccess.Read);
    string ftpUrl = string.Format("{0}/{1}", uploadUrl, uploadFileName);
    FtpWebRequest requestObj = WebRequest.Create(ftpUrl) as FtpWebRequest;
    requestObj.Method = WebRequestMethods.Ftp.UploadFile;
    rs = requestObj.GetRequestStream();
    byte[] buffer = new byte[40960];
    int read = 0;
    while ((read = fs.Read(buffer, 0, buffer.Length)) != 0)
    {
        rs.Write(buffer, 0, read);
    }
    rs.Flush();
}
catch (Exception ex)
{
    Console.WriteLine("File upload/transfer Failed.\r\nError Message:\r\n" + ex.Message);
}
finally
{
    if (fs != null)
    {
        fs.Close();
        fs.Dispose();
    }
    if (rs != null)
    {
        rs.Close();
        rs.Dispose();
    }
}
Console.WriteLine("End Time: {0}", DateTime.Now);
Console.WriteLine("Exiting the application.. press any key to continue");
Console.ReadLine();
The upload speed was low, so I increased the buffer size to 40960 after reading some articles, and the speed improved a bit.
Please explain the solution as well, for my understanding, as I am a beginner. Thanks in advance.
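To measure the transfer rate, time the copy loop and divide the bytes written by the elapsed seconds. Below is a minimal sketch of the same upload loop instrumented with System.Diagnostics.Stopwatch; the file and ftpUrl variables are the ones from the snippet above, and the once-per-second reporting interval and the MB/s unit are my own choices, not part of the original code:

FtpWebRequest request = (FtpWebRequest)WebRequest.Create(ftpUrl);
request.Method = WebRequestMethods.Ftp.UploadFile;

Stopwatch sw = Stopwatch.StartNew();
long totalBytes = 0;
long lastReportMs = 0;

using (FileStream source = new FileStream(file, FileMode.Open, FileAccess.Read))
using (Stream target = request.GetRequestStream())
{
    byte[] buffer = new byte[40960];
    int read;
    while ((read = source.Read(buffer, 0, buffer.Length)) != 0)
    {
        target.Write(buffer, 0, read);
        totalBytes += read;

        // Report the running average roughly once per second.
        if (sw.ElapsedMilliseconds - lastReportMs >= 1000)
        {
            lastReportMs = sw.ElapsedMilliseconds;
            double mbPerSec = (totalBytes / (1024.0 * 1024.0)) / sw.Elapsed.TotalSeconds;
            Console.WriteLine("Average rate so far: {0:F2} MB/s", mbPerSec);
        }
    }
}

double average = (totalBytes / (1024.0 * 1024.0)) / sw.Elapsed.TotalSeconds;
Console.WriteLine("Uploaded {0} bytes in {1:F1} s ({2:F2} MB/s)",
    totalBytes, sw.Elapsed.TotalSeconds, average);

This reports the average rate since the start; for an instantaneous rate, track the bytes written since the previous report instead of the running total.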
Related
I've tried downloading a file from an FTP server in chunks using FtpWebRequest and then merging the chunks back into the original file. The process works fine for files of 4 GB or less, but when I try the same process with 8 or 9 GB files I get an error.
Here is the sample code I've worked out:
private static long limitSize = Convert.ToInt64(ConfigurationManager.AppSettings["LimitSize"]);

public static void DownloadFromFTP()
{
    var guid = Guid.NewGuid().ToString();
    var path = $"{System.Environment.CurrentDirectory}/UploadedFiles/{guid}";
    try
    {
        string strFilePath = ConfigurationManager.AppSettings["FilePath"];
        NetworkCredential credentials = new NetworkCredential(ConfigurationManager.AppSettings["UserName"], ConfigurationManager.AppSettings["Password"]);
        Console.WriteLine("Starting...");
        string name = ConfigurationManager.AppSettings["FileName"];
        var strFolderPath = ConfigurationManager.AppSettings["Key"] + name;

        FtpWebRequest sizeRequest = (FtpWebRequest)WebRequest.Create(strFilePath);
        sizeRequest.KeepAlive = false;
        sizeRequest.Credentials = credentials;
        sizeRequest.Method = WebRequestMethods.Ftp.GetFileSize;
        long size = sizeRequest.GetResponse().ContentLength;
        Console.WriteLine($"File has {size} bytes");

        double filesizecheck = Convert.ToDouble(size) / Convert.ToDouble(limitSize);
        int chunks = Convert.ToInt32(Math.Ceiling(filesizecheck));
        long chunkLength = size / chunks;
        List<Task> tasks = new List<Task>();

        if (!System.IO.Directory.Exists(path))
        {
            System.IO.Directory.CreateDirectory(path);
        }
        var filepath = $"{path}/{name}";

        for (int chunk = 0; chunk < chunks; chunk++)
        {
            int i = chunk;
            tasks.Add(Task.Run(() =>
            {
                try
                {
                    FtpWebRequest request = (FtpWebRequest)WebRequest.Create(strFilePath);
                    request.Credentials = credentials;
                    request.Method = WebRequestMethods.Ftp.DownloadFile;
                    request.UseBinary = true;
                    request.UsePassive = true;
                    request.KeepAlive = false;
                    request.ContentOffset = chunkLength * i;
                    long toread =
                        (i < chunks - 1) ? chunkLength : size - request.ContentOffset;
                    Console.WriteLine(
                        $"Downloading chunk {i + 1}/{chunks} with {toread} bytes ...");
                    using (Stream ftpStream = request.GetResponse().GetResponseStream())
                    using (Stream fileStream = File.Create(filepath + "." + i))
                    {
                        var bufferSize = Convert.ToInt32(ConfigurationManager.AppSettings["BufferSize"]);
                        byte[] buffer = new byte[bufferSize];
                        int read;
                        while (((read = (int)Math.Min(buffer.Length, toread)) > 0) &&
                               ((read = ftpStream.Read(buffer, 0, read)) > 0))
                        {
                            fileStream.Write(buffer, 0, read);
                            toread -= read;
                        }
                    }
                    Console.WriteLine($"Downloaded chunk {i + 1}/{chunks}");
                }
                catch (Exception ex)
                {
                    Console.WriteLine($"Exception: {ex}");
                    Console.ReadKey();
                }
            }));
        }
        Console.WriteLine("Started all chunks downloads, waiting for them to complete...");
        Task.WaitAll(tasks.ToArray());
        CombineMultipleFilesIntoSingleFile(path, filepath);
        var result = UploadToS3(filepath, strFolderPath, size, path).Result;
        Console.WriteLine("Done");
        Console.ReadKey();
    }
    catch (Exception ex)
    {
        Console.WriteLine("Exception " + ex.Message);
        DeleteFiles(path);
        Console.ReadKey();
    }
}
Here is the method to merge the files. Note that Directory.GetFiles returns names in plain string order, so chunk 10 would sort before chunk 2; the chunks must be ordered by their numeric suffix before concatenating (this needs a using System.Linq; directive):

private static void CombineMultipleFilesIntoSingleFile(string inputDirectoryPath, string outputFilePath)
{
    // Order chunk files numerically by the ".<index>" suffix appended during download.
    string[] inputFilePaths = Directory.GetFiles(inputDirectoryPath)
        .OrderBy(p => int.Parse(Path.GetExtension(p).TrimStart('.')))
        .ToArray();
    Console.WriteLine("Number of files: {0}.", inputFilePaths.Length);
    using (var outputStream = File.Create(outputFilePath))
    {
        foreach (var inputFilePath in inputFilePaths)
        {
            using (var inputStream = File.OpenRead(inputFilePath))
            {
                // Buffer size can be passed as the second argument.
                inputStream.CopyTo(outputStream);
            }
            Console.WriteLine("The file {0} has been processed.", inputFilePath);
        }
    }
}
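For reference, Stream.CopyTo has an overload that takes an explicit buffer size, which can help with very large chunk files; the 81920-byte value below is just an illustrative choice, not from the original post:

// Copy with an explicit 80 KB buffer instead of the default.
inputStream.CopyTo(outputStream, 81920);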
App.config settings:
<add key="LimitSize" value="10000000"/>
<add key="BufferSize" value="10240"/>
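The error text isn't included above, but one factor worth checking when many chunk downloads run in parallel: outbound connections to a single host are capped by ServicePointManager.DefaultConnectionLimit (2 by default for console apps), so most of the chunk tasks sit queued and can hit the default 100-second request timeout on big files. The following is a hedged sketch of the knobs involved, not a tested fix; the values are arbitrary and ConfigureFtpRequest is a hypothetical helper name:

// Call once at startup: raise the per-host connection cap (default is 2),
// which otherwise queues most of the parallel chunk tasks.
ServicePointManager.DefaultConnectionLimit = 8;

// Apply to each FtpWebRequest before calling GetResponse().
static void ConfigureFtpRequest(FtpWebRequest request)
{
    request.Timeout = System.Threading.Timeout.Infinite; // control channel; default is 100,000 ms
    request.ReadWriteTimeout = 300000;                   // data stream read/write timeout, ms
}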
I am trying to upload a large file (1 GB) from code to SharePoint 2013 on-premises. I followed this tutorial, downloaded the "Microsoft.SharePointOnline.CSOM" package from NuGet, and tried this piece of code:
public Microsoft.SharePoint.Client.File UploadFileSlicePerSlice(ClientContext ctx, string libraryName, string fileName, int fileChunkSizeInMB = 3)
{
    // Each sliced upload requires a unique ID.
    Guid uploadId = Guid.NewGuid();

    // Get the name of the file.
    string uniqueFileName = Path.GetFileName(fileName);

    // Ensure that the target library exists, and create it if it is missing.
    if (!LibraryExists(ctx, ctx.Web, libraryName))
    {
        CreateLibrary(ctx, ctx.Web, libraryName);
    }

    // Get the folder to upload into, and the information about it.
    List docs = ctx.Web.Lists.GetByTitle(libraryName);
    ctx.Load(docs, l => l.RootFolder);
    ctx.Load(docs.RootFolder, f => f.ServerRelativeUrl);
    ctx.ExecuteQuery();

    // File object.
    Microsoft.SharePoint.Client.File uploadFile;

    // Calculate block size in bytes.
    int blockSize = fileChunkSizeInMB * 1024 * 1024;

    // Get the size of the file.
    long fileSize = new FileInfo(fileName).Length;

    if (fileSize <= blockSize)
    {
        // Use the regular approach.
        using (FileStream fs = new FileStream(fileName, FileMode.Open))
        {
            FileCreationInformation fileInfo = new FileCreationInformation();
            fileInfo.ContentStream = fs;
            fileInfo.Url = uniqueFileName;
            fileInfo.Overwrite = true;
            uploadFile = docs.RootFolder.Files.Add(fileInfo);
            ctx.Load(uploadFile);
            ctx.ExecuteQuery();
            // Return the file object for the uploaded file.
            return uploadFile;
        }
    }
    else
    {
        // Use the large-file upload approach.
        ClientResult<long> bytesUploaded = null;
        FileStream fs = null;
        try
        {
            fs = System.IO.File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            using (BinaryReader br = new BinaryReader(fs))
            {
                byte[] buffer = new byte[blockSize];
                byte[] lastBuffer = null;
                long fileoffset = 0;
                long totalBytesRead = 0;
                int bytesRead;
                bool first = true;
                bool last = false;

                // Read data from the file system in blocks.
                while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
                {
                    totalBytesRead = totalBytesRead + bytesRead;

                    // You've reached the end of the file.
                    if (totalBytesRead == fileSize)
                    {
                        last = true;
                        // Copy to a new buffer that has the correct size.
                        lastBuffer = new byte[bytesRead];
                        Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
                    }

                    if (first)
                    {
                        using (MemoryStream contentStream = new MemoryStream())
                        {
                            // Add an empty file.
                            FileCreationInformation fileInfo = new FileCreationInformation();
                            fileInfo.ContentStream = contentStream;
                            fileInfo.Url = uniqueFileName;
                            fileInfo.Overwrite = true;
                            uploadFile = docs.RootFolder.Files.Add(fileInfo);

                            // Start the upload by uploading the first slice.
                            using (MemoryStream s = new MemoryStream(buffer))
                            {
                                // Call the StartUpload method on the first slice.
                                bytesUploaded = uploadFile.StartUpload(uploadId, s);
                                ctx.ExecuteQuery(); // <------ here exception
                                // fileoffset is the pointer where the next slice will be added.
                                fileoffset = bytesUploaded.Value;
                            }
                            // You can only start the upload once.
                            first = false;
                        }
                    }
                    else
                    {
                        // Get a reference to your file.
                        uploadFile = ctx.Web.GetFileByServerRelativeUrl(docs.RootFolder.ServerRelativeUrl + System.IO.Path.AltDirectorySeparatorChar + uniqueFileName);
                        if (last)
                        {
                            // This is the last slice of data.
                            using (MemoryStream s = new MemoryStream(lastBuffer))
                            {
                                // End the sliced upload by calling FinishUpload.
                                uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                                ctx.ExecuteQuery();
                                // Return the file object for the uploaded file.
                                return uploadFile;
                            }
                        }
                        else
                        {
                            using (MemoryStream s = new MemoryStream(buffer))
                            {
                                // Continue the sliced upload.
                                bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                                ctx.ExecuteQuery();
                                // Update fileoffset for the next slice.
                                fileoffset = bytesUploaded.Value;
                            }
                        }
                    }
                } // while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
            }
        }
        finally
        {
            if (fs != null)
            {
                fs.Dispose();
            }
        }
    }
    return null;
}
But I'm getting a runtime ServerException with the message: Method "StartUpload" does not exist, thrown at the line ctx.ExecuteQuery(); (I marked this line in the code above).
I also tried the SharePoint 2013 package, and the StartUpload method is not supported in that package.
UPDATE:
Adam's code worked for ~1 GB files. It turned out that in web.config, at C:\inetpub\wwwroot\wss\VirtualDirectories\{myport}\web.config, the <requestLimits maxAllowedContentLength="2000000000"/> value is in bytes, not kilobytes as I thought at the beginning, so I set it to 2000000000 and it worked.
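For context, a sketch of where that attribute lives in web.config; the surrounding element names follow the standard IIS request-filtering section, and the value shown (~2 GB) is the one from the update above:

<system.webServer>
  <security>
    <requestFiltering>
      <!-- maxAllowedContentLength is measured in bytes, not kilobytes. -->
      <requestLimits maxAllowedContentLength="2000000000" />
    </requestFiltering>
  </security>
</system.webServer>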
Here is a method to upload a 1 GB file to SharePoint 2013 using CSOM that works (tested, and developed over a couple of days of trying different approaches :) )
try
{
    Console.WriteLine("start " + DateTime.Now.ToLongDateString() + " " + DateTime.Now.ToLongTimeString());
    using (ClientContext context = new ClientContext("[URL]"))
    {
        context.Credentials = new NetworkCredential("[LOGIN]", "[PASSWORD]", "[DOMAIN]");
        context.RequestTimeout = -1;
        Web web = context.Web;
        if (context.HasPendingRequest)
            context.ExecuteQuery();

        byte[] fileBytes;
        using (var fs = new FileStream(@"D:\OneGB.rar", FileMode.Open, FileAccess.Read))
        {
            fileBytes = new byte[fs.Length];
            int bytesRead = fs.Read(fileBytes, 0, fileBytes.Length);
        }
        using (var fileStream = new System.IO.MemoryStream(fileBytes))
        {
            Microsoft.SharePoint.Client.File.SaveBinaryDirect(context, "/Shared Documents/" + "OneGB.rar", fileStream, true);
        }
    }
    Console.WriteLine("end " + DateTime.Now.ToLongDateString() + " " + DateTime.Now.ToLongTimeString());
}
catch (Exception ex)
{
    Console.WriteLine("error -> " + ex.Message);
}
finally
{
    Console.ReadLine();
}
Besides this I had to:
- extend the maximum file upload size in Central Administration for this web application,
- set 'Web Page Security Validation' to Never in Central Administration for this web application (the linked page shows how to set it),
- extend the timeout on IIS.
The final result was a successful upload. The screenshot is in Polish; sorry for the language, but I usually work in PL. The full history is described in the linked post.
Install the SharePoint Online CSOM library using the command below.
Install-Package Microsoft.SharePointOnline.CSOM -Version 16.1.8924.1200
Then use the code below to upload the large file.
int blockSize = 8000000; // 8 MB
string fileName = "C:\\temp\\6GBTest.odt", uniqueFileName = String.Empty;
long fileSize;
Microsoft.SharePoint.Client.File uploadFile = null;
Guid uploadId = Guid.NewGuid();

using (ClientContext ctx = new ClientContext("siteUrl"))
{
    ctx.Credentials = new SharePointOnlineCredentials("user@tenant.onmicrosoft.com", GetSecurePassword());
    List docs = ctx.Web.Lists.GetByTitle("Documents");
    ctx.Load(docs.RootFolder, p => p.ServerRelativeUrl);

    // Use the large-file upload approach.
    ClientResult<long> bytesUploaded = null;
    FileStream fs = null;
    try
    {
        fs = System.IO.File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
        fileSize = fs.Length;
        uniqueFileName = System.IO.Path.GetFileName(fs.Name);
        using (BinaryReader br = new BinaryReader(fs))
        {
            byte[] buffer = new byte[blockSize];
            byte[] lastBuffer = null;
            long fileoffset = 0;
            long totalBytesRead = 0;
            int bytesRead;
            bool first = true;
            bool last = false;

            // Read data from the filesystem in blocks.
            while ((bytesRead = br.Read(buffer, 0, buffer.Length)) > 0)
            {
                totalBytesRead = totalBytesRead + bytesRead;

                // We've reached the end of the file.
                // (Note: this must be an equality test; "<=" would mark the
                // very first block as the last one and truncate the upload.)
                if (totalBytesRead == fileSize)
                {
                    last = true;
                    // Copy to a new buffer that has the correct size.
                    lastBuffer = new byte[bytesRead];
                    Array.Copy(buffer, 0, lastBuffer, 0, bytesRead);
                }

                if (first)
                {
                    using (MemoryStream contentStream = new MemoryStream())
                    {
                        // Add an empty file.
                        FileCreationInformation fileInfo = new FileCreationInformation();
                        fileInfo.ContentStream = contentStream;
                        fileInfo.Url = uniqueFileName;
                        fileInfo.Overwrite = true;
                        uploadFile = docs.RootFolder.Files.Add(fileInfo);

                        // Start the upload by uploading the first slice.
                        using (MemoryStream s = new MemoryStream(buffer))
                        {
                            // Call the StartUpload method on the first slice.
                            bytesUploaded = uploadFile.StartUpload(uploadId, s);
                            ctx.ExecuteQuery();
                            // fileoffset is the pointer where the next slice will be added.
                            fileoffset = bytesUploaded.Value;
                        }
                        // We can only start the upload once.
                        first = false;
                    }
                }
                else
                {
                    // Get a reference to our file.
                    uploadFile = ctx.Web.GetFileByServerRelativeUrl(docs.RootFolder.ServerRelativeUrl + System.IO.Path.AltDirectorySeparatorChar + uniqueFileName);
                    if (last)
                    {
                        // This is the last slice of data.
                        using (MemoryStream s = new MemoryStream(lastBuffer))
                        {
                            // End the sliced upload by calling FinishUpload.
                            uploadFile = uploadFile.FinishUpload(uploadId, fileoffset, s);
                            ctx.ExecuteQuery();
                            // Return the file object for the uploaded file.
                            return uploadFile;
                        }
                    }
                    else
                    {
                        using (MemoryStream s = new MemoryStream(buffer))
                        {
                            // Continue the sliced upload.
                            bytesUploaded = uploadFile.ContinueUpload(uploadId, fileoffset, s);
                            ctx.ExecuteQuery();
                            // Update fileoffset for the next slice.
                            fileoffset = bytesUploaded.Value;
                        }
                    }
                }
            }
        }
    }
    finally
    {
        if (fs != null)
        {
            fs.Dispose();
        }
    }
}
Or download the example code from GitHub: Large file upload with CSOM.
I'm looking for a way to upload a 1 GB file to SharePoint 2013.
You can change the upload limit with the PowerShell below (209715200 bytes is 200 MB):
$a = [Microsoft.SharePoint.Administration.SPWebService]::ContentService
$a.ClientRequestServiceSettings.MaxReceivedMessageSize = 209715200
$a.Update()
References:
https://thuansoldier.net/4328/
https://blogs.msdn.microsoft.com/sridhara/2010/03/12/uploading-files-using-client-object-model-in-sharepoint-2010/
https://social.msdn.microsoft.com/Forums/en-US/09a41ba4-feda-4cf3-aa29-704cd92b9320/csom-microsoftsharepointclientserverexception-method-8220startupload8221-does-not-exist?forum=sharepointdevelopment
Update:
The SharePoint CSOM request size is quite limited; it cannot exceed the 2 MB limit, and you cannot change this setting in an Office 365 environment. If you have to upload bigger files, you have to use the REST API. Here is the MSDN reference: https://msdn.microsoft.com/en-us/library/office/dn292553.aspx
Also see:
https://gist.github.com/vgrem/10713514
File Upload to SharePoint 2013 using REST API
Ref: https://sharepoint.stackexchange.com/posts/149105/edit (see the 2nd answer).
I'm trying to send a file from a TCP client to a listener. It all works, but after the file is sent the client disconnects from the server. Here is the code I'm currently using for the client:
public static void SendFile(FileInfo file)
{
    try
    {
        long size = file.Length;
        using (NetworkStream ns = client.GetStream())
        {
            using (FileStream Fs = new FileStream(file.FullName, FileMode.Open, FileAccess.Read))
            {
                int num;
                byte[] buffer = new byte[Fs.Length];
                while ((num = Fs.Read(buffer, 0, buffer.Length)) != 0)
                {
                    ns.Write(buffer, 0, num);
                }
                Fs.Close();
                ns.Close();
            }
        }
        FileInfo p_c = new FileInfo(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + @"\destfile.bin");
        p_c.Delete();
    }
    catch (Exception ex)
    {
    }
}
and for the server:
using (NetworkStream ns = new NetworkStream(current))
{
    using (FileStream Fs = new FileStream(full_path, FileMode.OpenOrCreate, FileAccess.Write))
    {
        while ((RecBytes = ns.Read(RecData, 0, RecData.Length)) > 0)
        {
            Fs.Write(RecData, 0, RecBytes);
            totalrecbytes += RecBytes;
        }
        Fs.Close();
        ns.Close();
        Console.WriteLine("File received. Path: {0}", full_path);
    }
}
Calling NetworkStream.Close() or NetworkStream.Dispose() (which happens at the end of the using block) will terminate the connection.
If you want to keep the socket open, use the NetworkStream(Socket, bool) constructor and pass false as the second parameter.
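A minimal sketch of that constructor applied to the server code above, where current is the accepted Socket from the original snippet:

// Pass ownsSocket: false so that disposing the stream does not close the socket.
using (NetworkStream ns = new NetworkStream(current, ownsSocket: false))
{
    // ... read the file from ns as before ...
} // ns is disposed here, but 'current' remains connected.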
I want to create a TcpClient which automatically gets multiple files from a server by their names.
I want some ideas on how I can build such an application.
My idea is:
Make a for loop which contains a switch where I specify my file names. I really don't know if this will work well.
To get out of the for loop I can compare the loop index to the number of files; if they are equal, I exit the for loop.
Example of my idea:
for (int i = 1; i <= 4; i++)
{
    switch (----)
    {
        case "file1":
            // code...
            break;
        case "file2":
            // code...
            break;
        case "file3":
            // code...
            break;
        case "file4":
            // code...
            break;
        default:
            // code...
            break;
    }
}
To download a file over FTP you can use FtpWebRequest, and for HTTP use HttpWebRequest.
Below is a simple example of how to request a file over HTTP (the method is similar for FTP):
public void Download(string url, string localPath)
{
    // WebRequest.Create returns a WebRequest, so the result must be cast.
    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(url);
    using (HttpWebResponse response = (HttpWebResponse)request.GetResponse())
    using (Stream stream = response.GetResponseStream())
    using (FileStream fs = new FileStream(localPath, FileMode.Create))
    {
        int count;
        byte[] buffer = new byte[8096];
        while ((count = stream.Read(buffer, 0, buffer.Length)) > 0)
            fs.Write(buffer, 0, count);
    }
}
Instead of using a switch inside a for loop, you should iterate over an array:

string[] files = new string[] { url1, url2, ... };
for (int i = 0; i < files.Length; i++)
{
    Download(files[i], "file" + i);
}
I solved it like so:
My app gets two files from the server, then moves and renames them:
test → mytest
test111 → test2
static string myfile1 = @"C:\inbox\mytest.txt";
static string myfile2 = @"C:\inbox\test2.txt";
// files from server
static string myServerfile = @"C:\Users\me\Documents\file_client\bin\Debug\test.csv";
static string myServerfile1 = @"C:\Users\RH-T3\Documents\file_client\bin\Debug\test111.txt";

public static void Main(string[] args)
{
    try
    {
        for (int i = 0; i < 2; i++)
        {
            if (i == 0)
            {
                Console.WriteLine("Downloading test.csv");
                string fileName = "test.csv";
                Console.WriteLine("Client starts...");
                //args[0] = Console.ReadLine();
                file_client client = new file_client(args);
                Console.WriteLine("after file_client...");
                NetworkStream serverStream = client.clientSocket.GetStream();
                LIB.writeTextTCP(serverStream, fileName);
                long rest = long.Parse(LIB.readTextTCP(serverStream));
                byte[] inStream = new byte[rest];
                // Read into the buffer at an advancing offset; reading at
                // offset 0 every time would overwrite the data already received.
                int offset = 0;
                while (offset < inStream.Length)
                {
                    int n = serverStream.Read(inStream, offset, inStream.Length - offset);
                    if (n == 0) break; // connection closed early
                    offset += n;
                    Console.WriteLine("REST: " + (inStream.Length - offset));
                }
                FileStream fs = new FileStream(fileName, FileMode.Create);
                fs.Write(inStream, 0, inStream.Length);
                fs.Close();
                serverStream.Close();
                if (File.Exists(myfile1))
                {
                    File.Delete(myfile1);
                }
                File.Move(myServerfile, myfile1);
                Console.WriteLine("Moved");
                System.Threading.Thread.Sleep(500);
            }
            else
            {
                Console.WriteLine("Downloading .txt file");
                string fileName = "test111.txt";
                Console.WriteLine("Client starts...");
                //args[0] = Console.ReadLine();
                file_client client = new file_client(args);
                Console.WriteLine("after file_client...");
                NetworkStream serverStream = client.clientSocket.GetStream();
                LIB.writeTextTCP(serverStream, fileName);
                long rest = long.Parse(LIB.readTextTCP(serverStream));
                byte[] inStream = new byte[rest];
                // Same advancing-offset read loop as above.
                int offset = 0;
                while (offset < inStream.Length)
                {
                    int n = serverStream.Read(inStream, offset, inStream.Length - offset);
                    if (n == 0) break; // connection closed early
                    offset += n;
                    Console.WriteLine("REST: " + (inStream.Length - offset));
                }
                FileStream fs = new FileStream(fileName, FileMode.Create);
                fs.Write(inStream, 0, inStream.Length);
                fs.Close();
                serverStream.Close();
                if (File.Exists(myfile2))
                {
                    File.Delete(myfile2);
                }
                File.Move(myServerfile1, myfile2);
                Console.WriteLine("Moved");
                System.Threading.Thread.Sleep(500);
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.ToString());
        Console.WriteLine("Cannot be DONE!");
    }
}
I think I have a memory issue with a function that should download a file from an FTP server. The reason I think it's a memory issue is that it works fine during debugging (maybe that gives the garbage collector more time), yet I thought the using blocks should take care of this...
Just to be clear: the function works when called once, but calling it several times in a for loop produces the error message: 550 The specified network name is no longer available.
Please help,
Asaf
private void downloadFile(string sourceFile, string targetFolder)
{
    string remoteFile = sourceFile.Replace("\\", "//");
    string localFolder = targetFolder + "\\" + sourceFile.Substring(sourceFile.LastIndexOf("\\") + 1);
    string filename = "ftp://" + ftpServerIP + "//" + remoteFile;

    FtpWebRequest ftpReq = (FtpWebRequest)WebRequest.Create(filename);
    ftpReq.Method = WebRequestMethods.Ftp.DownloadFile;
    ftpReq.Credentials = new NetworkCredential(ftpUserID, ftpPassword);
    ftpReq.UseBinary = true;
    ftpReq.Proxy = null;
    ftpReq.KeepAlive = false; // 3. Settings and action

    try
    {
        using (System.Net.FtpWebResponse response = (System.Net.FtpWebResponse)(ftpReq.GetResponse()))
        {
            using (System.IO.Stream responseStream = response.GetResponseStream())
            {
                using (System.IO.FileStream fs = new System.IO.FileStream(localFolder, System.IO.FileMode.Create))
                {
                    Byte[] buffer = new byte[2047];
                    int read = 0;
                    do
                    {
                        read = responseStream.Read(buffer, 0, buffer.Length);
                        fs.Write(buffer, 0, read);
                    } while (read == 0);
                    responseStream.Close();
                    fs.Flush();
                    fs.Close();
                }
                responseStream.Close();
            }
            response.Close();
        }
    }
    catch (WebException ex)
    {
        FtpWebResponse response = (FtpWebResponse)ex.Response;
        Console.Out.WriteLine(response.StatusDescription);
    }
}
There's a bug in the read loop that causes downloads to be truncated: the do/while condition while (read == 0) exits as soon as the first read returns any data, so only the first buffer is written. Use:
int read;
while ((read = responseStream.Read(buffer, 0, buffer.Length)) != 0)
{
    fs.Write(buffer, 0, read);
}
With that change in place I was able to download a number of large files via FTP without encountering exceptions.