so I have a script that essentially iterates through a bunch of delimited text files and uploads the images from said files to a SharePoint site. It works great, except for one minor problem: I have a couple of images that are >4MB in size, and these give me a (400) Bad Request error when the script attempts to upload them.
Code below:
// Iterates over delimited text files (via FileIO) and uploads the images they
// reference to a SharePoint picture library using the client object model
// (CSOM), then stamps the Title and Keywords fields on each uploaded item.
//
// NOTE(review): username, password, domain, siteName, listName, fileName,
// title and filePath are referenced below but not declared in this excerpt —
// presumably fields defined elsewhere in the original file; confirm.
class spImageUpload
{
    private static System.Collections.Generic.List<string> keywords;
    private static NetworkCredential credentials = new NetworkCredential(username, password, domain);
    // BUG FIX: the original read "new ClientContext(site name)", which is not
    // valid C#; the site URL variable is assumed to be named siteName.
    private static ClientContext clientContext = new ClientContext(siteName);
    private static Web site = clientContext.Web;
    private static List list = site.Lists.GetByTitle(listName);
    private static FileCreationInformation newFile = new FileCreationInformation();
    private static Image image = new Image(); // NOTE(review): never used below — candidate for removal
    private static FileIO fo = new FileIO();

    // Authenticates against the site, then runs the full upload pass.
    // BUG FIX: the original declared this constructor as "SharePointAccess()",
    // which cannot compile inside a class named spImageUpload; renamed to
    // match the class. Also dropped the stray "()" after the class name above.
    public spImageUpload()
    {
        sharepointLogin();
        uploadImage();
    }

    // Finds the most recently uploaded file in the list (matched by fileName)
    // and sets its Title and Keywords fields.
    private static void updateFields()
    {
        // Loads the site list
        clientContext.Load(list);
        // Creates a ListItemCollection object from list
        ListItemCollection listItems = list.GetItems(CamlQuery.CreateAllItemsQuery());
        // Loads the listItems
        clientContext.Load(listItems);
        // Executes the previous queries on the server
        clientContext.ExecuteQuery();
        // For each listItem...
        foreach (var listItem in listItems)
        {
            // Loads the file from the listItem.
            // PERF NOTE(review): this is one server round trip per item;
            // batching the Load calls before a single ExecuteQuery would be
            // much faster on large lists.
            clientContext.Load(listItem.File);
            clientContext.ExecuteQuery();
            // Looks for the most recently uploaded file, if found...
            if (listItem.File.Name.Contains(fileName))
            {
                title = fileName;
                // Changes the Title field value
                listItem["Title"] = title;
                // Changes the Keywords field value using the keywords list.
                // NOTE(review): += concatenates keywords with no separator —
                // confirm that is intended.
                foreach (var keyword in keywords)
                {
                    listItem["Keywords"] += keyword;
                }
            }
            // Remember changes... (called for every item, matched or not, as
            // in the original)
            listItem.Update();
        }
        // Executes the previous query and ensures changes are committed to the server
        clientContext.ExecuteQuery();
    }

    // Uploads every image described by the delimited files to the picture
    // library, then updates its metadata via updateFields().
    // NOTE(review): FileCreationInformation.Content sends the whole file in a
    // single CSOM request, which fails with "(400) Bad Request" for files over
    // the service's ~4 MB limit — use File.SaveBinaryDirect (WebDAV) or raise
    // ClientRequestServiceSettings.MaxReceivedMessageSize on the server.
    private static void uploadImage()
    {
        try
        {
            fo.loadFile();
            // First pass: just report what will be uploaded.
            foreach (var img in fo.lImageSet)
            {
                Console.WriteLine("Image Name: {0}", img.getName());
            }
            foreach (var img in fo.lImageSet)
            {
                DateTime start;
                DateTime end;
                start = DateTime.Now;
                // Path of the current image on disk
                filePath = img.getPath();
                Console.WriteLine("Image Path: {0}", filePath);
                // Reads in the contents of the file
                newFile.Content = System.IO.File.ReadAllBytes(filePath);
                // Target file name inside the library
                fileName = img.getName() + ".jpeg";
                // Sets the URL path for the file
                newFile.Url = fileName;
                // Collects the image's tags as the Keywords list consumed by
                // updateFields()
                keywords = new System.Collections.Generic.List<string>();
                foreach (var keyword in img.lTags)
                {
                    keywords.Add(keyword);
                }
                // Uploads the file to the picture library
                Microsoft.SharePoint.Client.File uploadFile = list.RootFolder.Files.Add(newFile);
                clientContext.Load(uploadFile);
                clientContext.ExecuteQuery();
                // Updates the associated fields of the just-uploaded image
                updateFields();
                end = DateTime.Now;
                TimeSpan span = end.Subtract(start);
                // Reports completion and elapsed time for this file
                Console.WriteLine("Uploaded: {0}", fileName + " Done!");
                Console.WriteLine("Time Elapsed: {0}", span.Seconds + "seconds");
            }
        }
        catch (Exception e)
        {
            Console.WriteLine(e.ToString());
        }
    }

    // Authenticates to the SharePoint site and loads the site and list
    // objects used by the rest of the class.
    private static void sharepointLogin()
    {
        try
        {
            // Loads credentials needed for authentication
            clientContext.Credentials = credentials;
            // Loads the site
            clientContext.Load(site);
            // Loads the site list
            clientContext.Load(list);
            // Executes the previous queries on the server
            clientContext.ExecuteQuery();
            // Writes out the title of the SharePoint site to the console
            Console.WriteLine("Title: {0}", site.Title);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.ToString());
        }
    }
}
Right now, I have to do everything remotely using the client-object model. I can't use SharePoint.Administration to change the max upload size. So does anyone know how, using the client-object model, I can get past this problem of not being able to upload files greater than 4MB? Thank you in advance for any help!
This is because of the WCF limit for the client object model. You need to run this on the server from a SharePoint management shell with admin rights:
// Raise the CSOM/WCF request-size ceiling to the maximum (~2 GB).
var contentService = SPWebService.ContentService;
contentService.ClientRequestServiceSettings.MaxReceivedMessageSize = int.MaxValue;
contentService.Update();
More info here
Try to use the SaveBinaryDirect method. The SaveBinaryDirect method uses Web-based Distributed Authoring and Versioning (WebDAV) for uploading and downloading files. Without building your own custom WCF service, WebDAV is the most efficient way to upload and download files.
// Upload the file via WebDAV (SaveBinaryDirect), which bypasses the CSOM
// ~4 MB request limit; the trailing "true" overwrites any existing file.
// NOTE(review): FileMode.OpenOrCreate will silently create an empty file if
// the source is missing — FileMode.Open is probably intended; confirm.
using (FileStream lp_fs = new FileStream(is_FileToImport, FileMode.OpenOrCreate))
{
    Microsoft.SharePoint.Client.File.SaveBinaryDirect(lp_context, lp_uri.LocalPath, lp_fs, true);
}

// Re-fetch the uploaded file so its list item fields can be updated.
Microsoft.SharePoint.Client.File lp_newFile = lp_web.GetFileByServerRelativeUrl(lp_uri.LocalPath);
lp_context.Load(lp_newFile);
lp_context.ExecuteQuery();

// Check out to make sure not to create multiple versions
lp_newFile.CheckOut();
ListItem lp_item = lp_newFile.ListItemAllFields;
// BUG FIX: the original assigned to "listItem", which is not declared in this
// snippet — the item variable is lp_item.
lp_item["Created"] = info.SourceFile.CreationTime;
lp_item["Modified"] = info.SourceFile.LastWriteTime;
lp_item.Update();
// Use OverwriteCheckIn type to make sure not to create multiple versions
lp_newFile.CheckIn(string.Empty, CheckinType.OverwriteCheckIn);
Related
I'm attempting to programmatically download all files from a document folder on a Sharepoint 2007 site. So far, I'm able to connect to the site, but am having issues connecting to the folders and download them.
// NOTE(review): question code, kept as posted. SPWeb has no "Folder" string
// indexer — folders live in web.Folders (e.g. web.Folders["testFolder"]) or
// via web.GetFolder("testFolder"). Per the stack trace quoted below, the
// ArgumentException is raised by a list lookup whose name matches no list.
try{
using(SPSite site = new SPSite("http://mysharepointserver/sites/subsite")){
using(SPWeb web = site.OpenWeb()){
// This line is reached, so the site connection itself succeeds
Console.Write("Connected to site");
SPFolder testFolder = web.Folder["testFolder"];
//example method downloading folder
downloadFolder(testFolder);
}
}
}
catch(Exception e){
// Logs the ArgumentException shown in the question
Log(e.ToString());
}
My console write works, so I know I am connecting to the site correctly.
My log file outputs:
System.ArgumentException: Value does not fall within the expected range.
at Microsoft.SharePoint.SPListCollection.GetListByName(String strListName, Boolean bThrowException)
at Microsoft.SharePoint.SPListCollection.get_Item(String strListName)
I also attempted to print out the following:
// NOTE(review): same snippet with the list collection printed. "web.lists" is
// presumably web.Lists (C# is case-sensitive), and Console.Write on the
// collection only prints its type name — enumerate it to see list titles.
using(SPWeb web = site.OpenWeb()){
Console.Write("Connected to site");
Console.Write(web.lists);
SPFolder testFolder = web.Folder["testFolder"];
//example method downloading folder
downloadFolder(testFolder);
}
Which outputs the following to console:
Connected to site
Microsoft.SharePoint.SPListCollection
But I'm not certain how to navigate through SPListCollection to retrieve my folder "testFolder"
Any help would be appreciated. Thanks!
When you connect to a SharePoint site, there are different types of libraries. A library that contains documents and folders is a DocumentLibrary, not a ListLibrary. Once you have the item/library by ID, cast it to SPDocumentLibrary to retrieve the items you want.
Use https://learn.microsoft.com/en-us/dotnet/api/microsoft.sharepoint.spdocumentlibrary?view=sharepoint-server to get different methods and properties of DocumentLibrary to retrieve the testFolder.
Example of accessing a document library item from: https://social.msdn.microsoft.com/Forums/en-US/5ee7fb55-5d90-4d28-8990-bf00479f891f/how-to-get-spdocumentlibrary?forum=sharepointdevelopmentprevious
// Example (adapted from the linked MSDN thread): resolve a list and item from
// query-string IDs and, when the list is a document library, read file
// details from it.
SPSite siteCollection = this.Site;
SPWeb site = this.Web;
// obtain query string values
string ListId = Request.QueryString["ListId"];
string ItemId = Request.QueryString["ItemId"];
// create list object and list item object
SPList list = site.Lists[new Guid(ListId)];
SPListItem item = list.Items.GetItemById(Convert.ToInt32(ItemId));
// query for information about list and list item
string ListTitle = list.Title;
string ItemTitle = item.Title;
if (list is SPDocumentLibrary) {
    SPDocumentLibrary documentLibrary = (SPDocumentLibrary)list;
    string DocumentTemplateUrl = documentLibrary.DocumentTemplateUrl;
    SPFile file = item.File;
    string FileAuthor = file.Author.Name;
    // BUG FIX: SPFile.TotalLength is a byte count, so the original label
    // " bits" was wrong; "0,###" merely adds digit grouping.
    string FileSize = file.TotalLength.ToString("0,###") + " bytes";
}
I wrote a program using CSOM to upload documents to SharePoint and insert metadata into the properties. Once in a while (like every 3 months), the SharePoint server gets busy, or we reset IIS, or some other communication problem occurs, and we get "The operation has timed out" error on clientContext.ExecuteQuery(). To resolve the issue I wrote an extension method for ExecuteQuery to try every 10 seconds, up to 5 times, to connect to the server and execute the query. My code works in the Dev and QA environments without any problem, but in Prod, when it fails the first time with a timeout error, on the second attempt it only uploads the document and doesn't update the properties, and all the properties are empty in the library. It doesn't return any error as a result of ExecuteQuery(), but it seems that of the two requests in the batch — uploading the file and updating the properties — it only does the uploading, and I don't know what happens to the properties. It kind of removes that from the batch on the second attempt!
I used both upload methods docs.RootFolder.Files.Add and File.SaveBinaryDirect in different parts of my code but I copy just one of them here so you can see what I have in my code.
I appreciate your help.
/// <summary>
/// Executes the pending CSOM query with retries: up to 5 attempts, 10 seconds
/// apart, to ride out transient server/IIS problems. Rethrows the last
/// exception if every attempt fails.
/// </summary>
/// <param name="context">Client context whose queued query is executed.</param>
public static void ExecuteSharePointQuery(ClientContext context)
{
    const int maxAttempts = 5;
    const int retryDelayMs = 10000;

    int attempt = 0;
    while (true)
    {
        try
        {
            context.ExecuteQuery();
            return;
        }
        catch (Exception ex)
        {
            attempt++;
            Logger.Error(string.Format("Communication attempt with SharePoint failed. Attempt {0}", attempt));
            Logger.Error(ex.Message);
            if (attempt >= maxAttempts)
            {
                // Out of retries: surface the failure to the caller.
                Logger.Error(string.Format("Couldn't execute the query in SharePoint."));
                throw;
            }
            // BUG FIX: the original slept 10 s even on the final attempt,
            // immediately before throwing; only wait when another attempt
            // will actually be made. The redundant isExecute flag (always
            // false inside the catch) was removed.
            Thread.Sleep(retryDelayMs);
        }
    }
}
/// <summary>
/// Uploads a PDF to the given SharePoint list's root folder and sets its
/// metadata fields in the same CSOM batch (check-out, field updates,
/// overwrite check-in).
/// </summary>
/// <param name="siteURL">Site collection URL to connect to.</param>
/// <param name="listTitle">Title of the target document library.</param>
/// <param name="item">Mapping of field names/types/values plus the pdfPath.</param>
public static void UploadSPFileWithProperties(string siteURL, string listTitle, FieldMapper item)
{
    Logger.Info(string.Format("Uploading to SharePoint: {0}", item.pdfPath));
    using (ClientContext clientContext = new ClientContext(siteURL))
    {
        using (FileStream fs = new FileStream(item.pdfPath, FileMode.Open))
        {
            try
            {
                FileCreationInformation fileCreationInformation = new FileCreationInformation();
                fileCreationInformation.ContentStream = fs;
                fileCreationInformation.Url = Path.GetFileName(item.pdfPath);
                fileCreationInformation.Overwrite = true;
                List docs = clientContext.Web.Lists.GetByTitle(listTitle);
                Microsoft.SharePoint.Client.File uploadFile = docs.RootFolder.Files.Add(fileCreationInformation);
                uploadFile.CheckOut();
                // Update the metadata
                ListItem listItem = uploadFile.ListItemAllFields;
                // Set field values on item
                foreach (List<string> list in item.fieldMappings)
                {
                    if (list[FieldMapper.SP_VALUE_INDEX] != null)
                    {
                        TrySet(ref listItem, list[FieldMapper.SP_FIELD_NAME_INDEX], (FieldType)Enum.Parse(typeof(FieldType), list[FieldMapper.SP_TYPE_INDEX]), list[FieldMapper.SP_VALUE_INDEX]);
                    }
                }
                listItem.Update();
                uploadFile.CheckIn(string.Empty, CheckinType.OverwriteCheckIn);
                // NOTE(review): on a timeout, this retry helper replays the
                // WHOLE batch; if the first attempt partially succeeded on the
                // server (file created and checked out), the replayed CheckOut
                // can fail and the field updates may be dropped — a likely
                // cause of the "file uploaded, properties empty" symptom.
                SharePointUtilities.ExecuteSharePointQuery(clientContext);
            }
            catch (Exception ex)
            {
                // BUG FIX: the original catch block was empty, silently
                // swallowing exactly the failures being investigated.
                Logger.Error(string.Format("Failed to upload {0} with properties.", item.pdfPath));
                Logger.Error(ex.Message);
                throw;
            }
        }
    }
}
There's too many possible reasons for me to really comment on a solution, especially considering it's only on the prod environment.
What I can say is that it's probably easiest to keep a reference to the last uploaded file. If your code fails then check if the last file has been uploaded correctly.
Side note: I'm not sure if this is relevant but if it's a large file you want to upload it in slices.
This is probably very simple but I am extremely new to coding anything, sorry in advance.
Currently I have a button4 that will read through my inbox for messages with a certain subject, if condition is met it displays the messages first class properties in a listview but I want it to also download the link found in each email.
It is a .zip link that when the link is clicked from inside the email it will download the zip. I want it to automatically download all links found when button4 is clicked.
I will show my button4 code and then an example of what the email is.
button4 code:
// Lists inbox messages from the last two hours whose subject contains
// "NFIRS File Validation" in the lstMsg ListView.
private void button4_Click(object sender, EventArgs e)
{
    EmailConnect();

    // Only consider mail received within the last two hours.
    DateTime cutoff = DateTime.Now.Add(new TimeSpan(0, -2, 0, 0));
    SearchFilter.IsGreaterThanOrEqualTo filter =
        new SearchFilter.IsGreaterThanOrEqualTo(ItemSchema.DateTimeReceived, cutoff);

    if (service == null)
    {
        return;
    }

    FindItemsResults<Item> findResults = service.FindItems(WellKnownFolderName.Inbox, filter, new ItemView(50));
    foreach (Item item in findResults)
    {
        EmailMessage message = EmailMessage.Bind(service, item.Id);
        string subject = message.Subject.ToString();
        if (!subject.Contains("NFIRS File Validation"))
        {
            continue;
        }

        // One row per matching message: received time, sender, subject,
        // attachment flag.
        string sender_ = message.From.Name.ToString() + "(" + message.From.Address.ToString() + ")";
        string hasAttachments = (message.HasAttachments) ? "Yes" : "No";
        lstMsg.Items.Add(new ListViewItem(new[]
            { message.DateTimeReceived.ToString(), sender_, message.Subject, hasAttachments }));
    }

    if (findResults.Items.Count <= 0)
    {
        lstMsg.Items.Add("No Messages found!!");
    }
}
Example email:
NFIRS File Validation
The NFIRS File Validation service has completed processing your files. Please follow this link to retrieve the zip file containing your results.
https://www.nfirs.fema.gov/biarchive/xxxxxxxxx_xxxxxxxxx.zip
This file will be deleted after 28 days.
If you have any questions, please do not reply to this email. Instead, please contact the NFIRS Support Center.
This is basically a duplicate of the link #DonBoitnott commented. The only extra steps I am taking are putting the body of each email into a property list, parsing it, and making sure it saves with the same filename the URL had in the original email.
// Extracts the download link from each email body, downloads the zip to the
// Downloads folder, and records the derived file name on the property object.
private void handleLinks(List<EmailProperties> properties)
{
    using (WebClient client = new WebClient())
    {
        foreach (var prop in properties)
        {
            string link = searchForLink(prop.Body);
            // File name is the part of the URL between the archive prefix and ".zip"
            string fileName = MyExtensions.Between(link, "https://www.nfirs.fema.gov/biarchive/", ".zip");
            // BUG FIX: the original wrote (#"C:\...") — "#" is not valid C#;
            // the verbatim-string prefix "@" is required for the backslashes.
            string saveTo = string.Format(@"C:\Users\Foo\Downloads\{0}.zip", fileName);
            prop.Name = fileName;
            client.DownloadFile(link, saveTo);
        }
    }
}
// Pulls the download URL out of the email body: it is the text sitting
// between the "results." sentence and the "This file will" sentence,
// separated by blank lines (CRLF pairs).
private string searchForLink(string body)
{
    const string before = "results.\r\n\r\n";
    const string after = "\r\n\r\nThis file will";
    return MyExtensions.Between(body, before, after);
}
I'm trying to download a text file which contains a lot of domains:
// BackgroundWorker entry point: downloads today's domain list and adds it to
// the grid. The argument string selects the action to perform.
private void bgWorker_DoWork(object sender, DoWorkEventArgs e)
{
    string action = e.Argument as string;
    if (action == "xc_download")
    {
        // The source publishes one file per day, named M-dd-yyyy.txt.
        DateTime theDate = DateTime.Now;
        // BUG FIX: the original called theDate.ToString("yyyy-MM-dd") and
        // discarded the result — DateTime is immutable, so that statement was
        // a no-op and has been removed.
        string downloadURL = ("http://www.namejet.com/Download/" + (theDate.ToString("M-dd-yyyy") + ".txt"));
        using (WebClient wc = new WebClient())
        {
            // NOTE(review): this downloads the whole file as ONE string and
            // adds it as a single row; split on newlines to get one row per
            // domain.
            string urls = wc.DownloadString(downloadURL);
            // NOTE(review): DoWork runs on a background thread — touching a
            // WinForms control here is unsafe. Prefer setting e.Result and
            // adding rows in RunWorkerCompleted (that is also what makes the
            // UI feel slow/frozen here).
            dgView.Rows.Add(urls);
        }
    }
}
Once downloaded, I'm trying to add them to a datagrid. The problem is, this is very very slow. Is there a faster way to download text files and add the data to the gridview I should be using?
You could parallelize the download process. One option is TPL's Parallel.ForEach. You can set the maximum number of concurrent actions (downloads) to prevent the servers from being flooded.
var downloads = new List<string>();
downloads.Add(...);

// At most 8 downloads run simultaneously so the servers aren't flooded.
Parallel.ForEach(
    downloads,
    new ParallelOptions { MaxDegreeOfParallelism = 8 },
    Download);
Then create a download method and handle the downloading in there:
// Downloads a single URL; runs on a worker thread, so any UI work must be
// marshalled back via Invoke.
private void Download(string url)
{
    // download
    this.Invoke((MethodInvoker)(() =>
    {
        // update the UI inside here
    }));
}
I'm working with a Visual WebPart and I want to upload a image from my FileUpload control to a SharePoint list. This is the code I'm using but I can't get it to work, (Title, prodnum, color, etc is working, but not image). I've also installed SparQube.
This is my ListView:
.
// Saves the form fields to the "SharePointDatabase" list.
// NOTE(review): question code, kept as posted — only the file NAME is stored
// in the "Image" field; the posted file's bytes are never uploaded anywhere,
// which matches the reported symptom (all fields work except the image).
// Upload the stream first (library or item attachment) and store the URL.
protected void Button_Save_Click(object sender, EventArgs e)
{
SPSite currentSite = SPContext.Current.Site;
SPList myList = currentSite.RootWeb.Lists.TryGetList("SharePointDatabase");
try
{
// Proceed only when the list exists and a file was actually posted
if (myList != null && FileUpload_Pic.PostedFile != null && FileUpload_Pic.HasFile)
{
SPListItem listItem = myList.Items.Add();
listItem["Title"] = TextBox_Name.Text;
listItem["ProductNumber"] = TextBox_ProdNum.Text;
listItem["Color"] = TextBox_Color.Text;
listItem["ListPrice"] = TextBox_ListPrice.Text;
listItem["MoreInformation"] = TextBox_MoreInfo.Text;
// Only the name — the file content itself is never uploaded (see note above)
string fileName = Path.GetFileName(FileUpload_Pic.PostedFile.FileName);
listItem["Image"] = fileName;
listItem.Update();
// Clear the form after a successful save
TextBox_Search.Text = string.Empty;
TextBox_Name.Text = string.Empty;
TextBox_MoreInfo.Text = string.Empty;
TextBox_ProdNum.Text = string.Empty;
TextBox_Color.Text = string.Empty;
TextBox_ListPrice.Text = string.Empty;
Label_Exception.Text = "Saved to Database list.";
// NOTE(review): disposing the web part from its own click handler is
// unusual — confirm this is intentional.
Dispose();
}
}
catch (Exception x)
{
// Surface any failure message in the UI label
Label_Exception.Text = x.Message;
}
}
You can add file as stream directly from File Upload to specific web using following method and then add the file path to the list as shown in below example,
SPContext.Current.Web.Files.Add(String.Concat(SPContext.Current.Web.Site.RootWeb.Url, path), stream, true);
path is the relative path for the image; in this case, the file name.
The stream can be obtained using FileUpload.FileContent on the file upload control.
Then add this path to the list as below.
listItem["Image"] = path;
This works in all browsers.
Are you sure the SPList has attachment upload enabled?
I have found that the normal ribbon button isn't greyed out even though the list is not able to hold item attachments.
// Illustrative only: SPList has no usable public constructor in real code —
// obtain the list from web.Lists. The point is the EnableAttachments flag.
SPList list = new SPList(); // Sample
list.EnableAttachments = false; //set
bool attachmentsAllowed = list.EnableAttachments; //get
What Type of field is "Image"? It's not the attachments folder...
An other way to save your Image is to upload it to a library and use a "Hyperlink" field.
Use this as help: http://www.sharepoint-tips.com/2007/10/code-practices-gettingsetting-values.html
Check out this question: https://sharepoint.stackexchange.com/questions/49481/exception-when-adding-attachment/49485
There are some problems, when you are using FileUpload control in standard edit/add pages.