I want to send a file attachment from a FileUpload control to Smartsheet. I was using the SDK, and I found sample code for attachments.
This my code for attachment:
if (fileUpload.HasFile)
{
// Original client-side file name of the upload.
string fileName = fileUpload.PostedFile.FileName;
// Save the upload under the web-app root so the SDK can read it from disk.
string sourceFile = Server.MapPath("~/")+fileName;
fileUpload.PostedFile.SaveAs(sourceFile);
// MIME type reported by the browser, passed through to Smartsheet.
string type = fileUpload.PostedFile.ContentType;
// Attaches at sheet level; to attach to a specific row, pass a row id instead of sheetId.
smartsheet.Sheets().Attachments().AttachFile(sheetId, sourceFile, type);
}
I read about the AttachFile() method; it requires an ObjectId, but I do not understand how to get the ObjectId, so I used the sheetId.
Edit:
That code was correct: when I ran it, I found the file in the Attachments tab on my sheet, but I want to attach that file to the new row I added.
Can you help me to solve this?
I am still new to Smartsheet and still learning how to use the Smartsheet API C# SDK, but I have not found many examples or sample code for the Smartsheet API.
Here my full code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using Smartsheet.Api;
using Smartsheet.Api.Models;
using Smartsheet.Api.OAuth;

namespace smartsheet
{
    /// <summary>
    /// Test page that inserts a row built from two text boxes and attaches the
    /// uploaded file to that newly created row.
    /// </summary>
    public partial class TestInput : System.Web.UI.Page
    {
        protected void Page_Load(object sender, EventArgs e)
        {
        }

        /// <summary>
        /// Inserts a new row at the bottom of the sheet, then attaches the
        /// uploaded file to that row (not to the sheet as a whole).
        /// </summary>
        protected void btnSubmit_Click(object sender, EventArgs e)
        {
            try
            {
                // NOTE(review): never commit a real access token to source control;
                // load it from configuration instead.
                string strToken = "649qjpt7pmq8i0xze5550hr12x";
                long sheetId = 4204618734430084;

                SmartsheetClient smartsheet = new SmartsheetBuilder().SetAccessToken(strToken).Build();

                // Map the two text boxes onto the sheet's first two columns.
                List<Column> cols = new List<Column>(smartsheet.Sheets().Columns().ListColumns(sheetId));
                Cell cell1 = new Cell();
                cell1.ColumnId = cols[0].ID;
                cell1.Value = txFirstname.Text;
                Cell cell2 = new Cell();
                cell2.ColumnId = cols[1].ID;
                cell2.Value = txLastname.Text;

                Row row = new Row();
                row.Cells = new List<Cell> { cell1, cell2 };

                RowWrapper rowWrapper = new RowWrapper.InsertRowsBuilder()
                    .SetRows(new List<Row> { row })
                    .SetToBottom(true)
                    .Build();

                // Keep the result: it carries the server-assigned id of the new row.
                IList<Row> insertedRows = smartsheet.Sheets().Rows().InsertRows(sheetId, rowWrapper);

                if (fileUpload.HasFile)
                {
                    string fileName = fileUpload.PostedFile.FileName;
                    string sourceFile = Server.MapPath("~/") + fileName;
                    fileUpload.PostedFile.SaveAs(sourceFile);
                    string type = fileUpload.PostedFile.ContentType;

                    // Attach to the new row instead of the sheet, so the file shows
                    // up on the row rather than only in the sheet's Attachments tab.
                    long newRowId = insertedRows[0].ID.Value;
                    smartsheet.Rows().Attachments().AttachFile(newRowId, sourceFile, type);
                }
            }
            catch (Exception ex)
            {
                LableMsg.Text = ex.Message;
            }
        }
    }
}
I'm not a C# developer, but I am very familiar with Smartsheet's Java SDK, which C# SDK was modeled after, so there may be some slight syntax problems with the answer below, but it should give you the gist of things.
You are almost there - to attach a file to a new row, you'll just need to get the id of the new row you just created and then attach to it instead of the sheet. Change your line:
smartsheet.Sheets().Rows().InsertRows(sheetId, rowWrapper);
to
// InsertRows returns the created rows, including their server-assigned ids.
IList<Row> insertedRow = smartsheet.Sheets().Rows().InsertRows(sheetId, rowWrapper);
// C# uses an indexer, not Java's get(0); ID is nullable (long?) in the C# SDK.
long newRowId = insertedRow[0].ID.Value;
Now you can attach directly to that row:
smartsheet.Rows().Attachments().AttachFile(newRowId, sourceFile, type);
Related
The application I am working on must take every link saved in an Excel file and request it; whenever the page status is 404, I have to save that link in a single new Excel file (all failing links together, not one file per link).
So far I've managed to request every link and to create a new Excel file, but unfortunately I'm stuck on how to write the code that saves every URL whose status is 404 after checking the URLs in the existing Excel file.
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
using Microsoft.Office.Interop.Excel;
using OfficeOpenXml;

namespace StartaproUrls
{
    partial class Program
    {
        // URLs that answered HTTP 404; this feeds the error-report workbook.
        private static readonly List<object> urlsWith404 = new();

        static async Task Main(string[] args)
        {
            using var client = new HttpClient();
            // '#"' in the original post was a transcription error: verbatim strings use '@'.
            string path = @"C:\Users\stefanv\Downloads\startapro_urls.xlsx";

            Application excelApp = new();
            Workbook excelWorkbook = excelApp.Workbooks.Open(path);
            try
            {
                Worksheet excelWorksheet = (Worksheet)excelWorkbook.Sheets[1];
                Microsoft.Office.Interop.Excel.Range excelRange = excelWorksheet.UsedRange;
                int colCount = excelRange.Columns.Count;
                object[,] valueArray = (object[,])excelRange.get_Value(
                    XlRangeValueDataType.xlRangeValueDefault);

                // NOTE(review): the original only checked the first 20 rows; kept as-is.
                // Use excelRange.Rows.Count here to scan the whole sheet.
                for (int i = 1; i <= 20; i++)
                {
                    for (int j = 1; j <= colCount; j++)
                    {
                        var url = $"https://beta.startapro.hu{valueArray[i, j]}";
                        using HttpResponseMessage result =
                            await client.SendAsync(new HttpRequestMessage(HttpMethod.Head, url));
                        int statusCode = (int)result.StatusCode;
                        Console.WriteLine(statusCode);

                        // Remember every URL that came back 404 so it can be
                        // written to the error workbook below.
                        if (statusCode == 404)
                        {
                            urlsWith404.Add(valueArray[i, j]);
                        }
                    }
                }
            }
            finally
            {
                // Always release the Excel COM objects, even if a request throws.
                excelWorkbook.Close();
                excelApp.Quit();
            }

            ExcelPackage.LicenseContext = LicenseContext.NonCommercial;
            var file = new FileInfo(@"C:\Users\stefanv\Downloads\startapro_urlsError.xlsx");
            var urlError = GetSetupData();
            await CreateExcelForErrorUrls.SaveExcelFile(urlError, file);
        }

        // Builds one UrlErrorModel per URL that returned 404 (instead of dumping
        // the entire source value array into a single row).
        public static List<UrlErrorModel> GetSetupData()
        {
            var output = new List<UrlErrorModel>();
            foreach (var url in urlsWith404)
            {
                output.Add(new UrlErrorModel { UrlWithError = url });
            }
            return output;
        }
    }
}
Below is the class for creating the Excel file:
using OfficeOpenXml;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static StartaproUrls.Program;

namespace StartaproUrls
{
    /// <summary>Writes the URL error report to an Excel workbook via EPPlus.</summary>
    class CreateExcelForErrorUrls
    {
        /// <summary>
        /// Replaces any existing report file, then writes <paramref name="urlError"/>
        /// (with a header row) to a worksheet named "ErrorRaport" and auto-fits columns.
        /// </summary>
        public static async Task SaveExcelFile(List<UrlErrorModel> urlError, FileInfo file)
        {
            RemoveExistingReport(file);

            using var package = new ExcelPackage(file);
            var worksheet = package.Workbook.Worksheets.Add("ErrorRaport");
            worksheet.Cells["A1"].LoadFromCollection(urlError, true).AutoFitColumns();
            await package.SaveAsync();
        }

        // Deletes a stale report so the new one always starts from a clean file.
        private static void RemoveExistingReport(FileInfo file)
        {
            if (!file.Exists)
            {
                return;
            }
            file.Delete();
        }
    }
}
And below is the model for UrlError:
namespace StartaproUrls
{
partial class Program
{
// One row of the error report written by CreateExcelForErrorUrls.
public class UrlErrorModel
{
// The failing URL. Typed object because the value comes straight from an
// Excel cell (object[,]) — presumably a string or URL fragment; TODO confirm.
public object UrlWithError { get; set; }
}
}
}
I have a Windows Application where in MainWindow.xaml.cs I have a button click option to import, which goes off to run Main.Run, based on a spreadsheet that was dragged into a text box:
private void Btn_Import_Click(object sender, RoutedEventArgs e)
{
// Kicks off the spreadsheet import on a background task. The path comes from
// the drag-and-drop text box exposed through Global.bindingObjects.
Main.Run(Global.bindingObjects.spreadsheetTxtBxFile);
}
The above code takes us to a Task, which should then go off to run DefunctRun in a different project, but it doesn't go to it when using a break point and F10 in debug:
/// <summary>
/// Runs the defunct-field import for <paramref name="spreadsheetpath"/> on a
/// background thread-pool thread and returns immediately.
/// </summary>
internal static void Run(string spreadsheetpath)
{
    // Task.Run is the idiomatic replacement for new Task(...) + Start().
    // Note for debugging: F10 from the caller will NOT step into the lambda,
    // because it runs later on a pool thread — put a breakpoint inside
    // DefunctRun instead.
    Task.Run(() =>
    {
        try
        {
            PICSObjects.DefunctFields.DefunctRun(spreadsheetpath);
        }
        catch (Exception ex)
        {
            // Without this, any exception thrown by the import is silently
            // lost with the task (the original empty try/finally hid nothing
            // but also surfaced nothing).
            System.Diagnostics.Trace.TraceError("Defunct import failed: {0}", ex);
        }
    });
}
The code that it should go off and perform, which I need to have the spreadsheet path turned into a dataset which this class should do if it can be accessed.:
using InfExcelExtension;
using System.Data;
using System.IO;
using System.Diagnostics;

namespace PICSObjects
{
    public static partial class DefunctFields
    {
        /// <summary>
        /// Loads the dragged-in spreadsheet, scans every *.sql file under
        /// <c>filespath</c> for the fields/tables the spreadsheet lists, and
        /// writes the matches to a new workbook ("Scripts" and "Tables" sheets),
        /// then opens it.
        /// </summary>
        public static void DefunctRun(string spreadsheetpath)
        {
            // NOTE(review): filespath is empty, so the DirectoryInfo below will
            // throw until a real folder is configured. output likewise needs a
            // full file name, not just an extension.
            // ('#"' in the original post was a transcription error for '@"'.)
            string filespath = @"";
            string output = @".xlsx";

            // Spreadsheet -> DataSet, one DataTable per sheet.
            // Expects tables named "Fields" (with ToRemove/Table columns) and "Tables".
            DataSet origdata = ExcelToDataset.ToDataSet(spreadsheetpath);

            // Result dataset: scripts that mention a defunct field, and tables seen in scripts.
            DataSet newdata = new DataSet();
            newdata.Tables.Add("Scripts");
            newdata.Tables.Add("Tables");
            newdata.Tables["Scripts"].Columns.Add("ToRemove");
            newdata.Tables["Scripts"].Columns.Add("ScriptName");
            newdata.Tables["Scripts"].Columns.Add("RelatedTable");
            newdata.Tables["Tables"].Columns.Add("TableName");
            newdata.Tables["Tables"].Columns.Add("ScriptName");

            // Walk every .sql file in the configured folder.
            DirectoryInfo d = new DirectoryInfo(filespath);
            foreach (var file in d.GetFiles("*.sql"))
            {
                string currentscript = file.Name;
                // Lower-case the whole script once so Contains below is
                // effectively case-insensitive for the field search.
                string readscript = File.ReadAllText(file.FullName).ToLower();

                // Record every defunct field mentioned in this script.
                foreach (DataRow dr in origdata.Tables["Fields"].Rows)
                {
                    string searchtext = dr["ToRemove"].ToString().ToLower();
                    if (readscript.Contains(searchtext))
                    {
                        DataRow row = newdata.Tables["Scripts"].NewRow();
                        row["ToRemove"] = searchtext;
                        row["ScriptName"] = currentscript;
                        row["RelatedTable"] = dr["Table"];
                        newdata.Tables["Scripts"].Rows.Add(row);
                    }
                }

                // Record tables referenced by this script.
                // NOTE(review): this comparison is case-sensitive while readscript
                // was lower-cased — confirm the "Tables" column values are already
                // lower case, otherwise nothing will match.
                foreach (DataRow dr in origdata.Tables["Tables"].Rows)
                {
                    string searchtext = dr["Tables"].ToString();
                    if (readscript.Contains(searchtext))
                    {
                        DataRow row = newdata.Tables["Tables"].NewRow();
                        row["TableName"] = searchtext;
                        row["ScriptName"] = currentscript;
                        newdata.Tables["Tables"].Rows.Add(row);
                    }
                }
            }

            // Persist the results and show them to the user.
            newdata.ToWorkBook(output);
            Process.Start(output);
        }
    }
}
Ive sorted this one now. Just needed a fair bit of tweaking, based on everyone's comments
I have a huge .csv file, to be specific a .TAB file with 29 million rows and the file size is around 600 MB. I would need to read this into an IEnumerable collection.
I have tried CsvHelper, GenericParser, and few other solutions but always ending up with an Out of Memory exception
Please suggest a way to do this
I have tried
// Attempt 1: CsvHelper. ToList() materializes all ~29M records in memory at
// once — this is what triggers the OutOfMemoryException.
// (NOTE: '#"' in the paths below is a transcription artifact of '@"'.)
var deliveryPoints = new List<Point>();
using (TextReader csvreader1 = File.OpenText(#"C:\testfile\Prod\PCDP1705.TAB")) //StreamReader csvreader1 = new StreamReader(#"C:\testfile\Prod\PCDP1705.TAB"))
using (var csvR1 = new CsvReader(csvreader1, csvconfig))
{
csvR1.Configuration.RegisterClassMap<DeliveryMap>();
// Forces full enumeration of the file into a single List<Point>.
deliveryPoints = csvR1.GetRecords<Point>().ToList();
}
// Attempt 2: GenericParser — streams one row at a time, but every parsed
// Point is still appended to the same in-memory list, so peak memory is the same.
using (GenericParser parser = new GenericParser())
{
parser.SetDataSource(#"C:\testfile\Prod\PCDP1705.TAB");
parser.ColumnDelimiter = '\t';
parser.FirstRowHasHeader = false;
//parser.SkipStartingDataRows = 10;
//parser.MaxBufferSize = 4096;
//parser.MaxRows = 500;
parser.TextQualifier = '\"';
while (parser.Read())
{
var address = new Point();
address.PostCodeID = int.Parse(parser[0]);
address.DPS = parser[1];
address.OrganisationFlag = parser[2];
deliveryPoints.Add(address);
}
}
and
// Attempt 3: CsvHelper row-by-row — still accumulates every Point in
// deliveryPoints, so memory use is unchanged from the other attempts.
// (NOTE: '#"' below is a transcription artifact of '@"'.)
var deliveryPoints = new List<Point>();
csvreader = new StreamReader(#"C:\testfile\Prod\PCDP1705.TAB");
csv = new CsvReader(csvreader, csvconfig);
while (csv.Read())
{
var address = new Point();
address.PostCodeID = int.Parse(csv.GetField(0));
address.DPS = csv.GetField(1);
deliveryPoints.Add(address);
}
The problem is that you are loading the entire file into memory. You can compile your code for x64, which will greatly increase the memory limit for your program, but it is better to avoid loading the entire file into memory if you can.
Notice that calling ToList() forces the CsvReader to load entire file into memory at once:
csvR1.GetRecords<Point>().ToList();
But this will load only one line at a time:
foreach(var record in csvR1.GetRecords<Point>())
{
//do whatever with the single record
}
This way you can process files of practically unlimited size.
There is no need for third-party software — the .NET library methods are sufficient:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.Data;
namespace ConsoleApplication1
{
class Program
{
    /// <summary>
    /// Streams the .TAB file line by line, parses each row into a Point, and
    /// mirrors the result into a DataTable.
    /// </summary>
    static void Main(string[] args)
    {
        // The source is a .TAB (tab-delimited) file — the question's own
        // GenericParser attempt used '\t' — so split on tabs; splitting on ','
        // (as originally posted) would leave the whole line in csvArray[0].
        // 'using' also guarantees the reader is closed.
        using (StreamReader csvreader = new StreamReader(@"C:\testfile\Prod\PCDP1705.TAB"))
        {
            string inputLine;
            while ((inputLine = csvreader.ReadLine()) != null)
            {
                string[] csvArray = inputLine.Split(new char[] { '\t' });
                var address = new Point();
                address.postCodeID = int.Parse(csvArray[0]);
                address.DPS = csvArray[1];
                Point.deliveryPoints.Add(address);
            }
        }

        // Mirror the parsed points into a DataTable.
        // NOTE(review): for ~29M rows this holds two full copies in memory;
        // consider processing each line as it is read instead.
        DataTable dt = new DataTable();
        dt.Columns.Add("Post Code", typeof(int));
        dt.Columns.Add("DPS", typeof(string));
        foreach (Point point in Point.deliveryPoints)
        {
            dt.Rows.Add(new object[] { point.postCodeID, point.DPS });
        }
    }
}
// One delivery-point row parsed from the .TAB file.
public class Point
{
// Shared sink for every parsed row; static so Main can fill it without an
// instance. NOTE(review): holding all ~29M points here is the memory bottleneck.
public static List<Point> deliveryPoints = new List<Point>();
public int postCodeID { get; set; }
public string DPS { get; set; }
}
}
It worked by running in x64 mode, and by adding
<gcAllowVeryLargeObjects enabled="true" /> in app.config.
Hi, can anyone give me a solution to this problem? I have to import a CSV file using C#, but I ran into the problem shown in this screenshot:
Screen
The separator between columns is ',', but the data contains rows that include '"' characters.
Mohamed, I cannot see your screenshot, but can point you toward generic lists and creating a class to represent data. You will need to add references from the "Project" menu.
Microsoft.VisualBasic
System.Configuration
WindowsBase
I am including code from a snippet of code where I was doing that:
using System;
using System.IO;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Microsoft.VisualBasic.FileIO;
namespace CsvToListExp
{
class Program
{
    /// <summary>
    /// Loads the hospital and vendor denial CSVs and computes, in each
    /// direction, the records present in one source but not the other.
    /// </summary>
    public static void Main(string[] args)
    {
        // HARD_CODED FOR EXAMPLE ONLY - TO BE RETRIEVED FROM APP.CONFIG IN REAL PROGRAM
        // (The originally posted '#"C:\\..."' mixed a verbatim prefix with escaped
        // backslashes; a plain verbatim string is used here.)
        string hospPath = @"C:\events\inbound\OBLEN_COB_Active_Inv_Advi_Daily_.csv";
        string vendPath = @"C:\events\outbound\Advi_OBlen_Active_Inv_Ack_Daily_.csv";

        // The two files were parsed with duplicated code; one helper does both.
        List<DenialRecord> hospList = LoadDenialRecords(hospPath);
        List<DenialRecord> vendList = LoadDenialRecords(vendPath);

        // Compare the lists each way for denials not in the other source.
        // NOTE(review): Except relies on DenialRecord equality — unless
        // DenialRecord overrides Equals/GetHashCode this compares references
        // and will report everything as an exception; verify.
        List<DenialRecord> hospExcpt = hospList.Except(vendList).ToList();
        List<DenialRecord> vendExcpt = vendList.Except(hospList).ToList();
    }

    // Parses one comma-delimited file into DenialRecords, one per 7-field row.
    private static List<DenialRecord> LoadDenialRecords(string path)
    {
        var records = new List<DenialRecord>();
        // 'using' disposes the parser; the explicit Close/Dispose calls in the
        // original were redundant.
        using (TextFieldParser parser = new Microsoft.VisualBasic.FileIO.TextFieldParser(path))
        {
            parser.TextFieldType = FieldType.Delimited;
            parser.SetDelimiters(",");
            parser.HasFieldsEnclosedInQuotes = false;
            parser.TrimWhiteSpace = true;
            while (!parser.EndOfData)
            {
                try
                {
                    string[] row = parser.ReadFields();
                    // The record consumes fields 0-6, so require at least 7 columns.
                    // (The original tested row.Length <= 7, which admits short rows
                    // that then throw IndexOutOfRangeException on row[6].)
                    if (row.Length >= 7)
                    {
                        records.Add(new DenialRecord(row[0], row[1], row[2], row[3], row[4], row[5], row[6]));
                    }
                }
                catch (Exception e)
                {
                    // Keep going on malformed lines, but surface the problem.
                    Console.WriteLine("Error is: {0}", e.ToString());
                }
            }
        }
        return records;
    }
}
}
Google TextFieldParser and look at its methods, properties, and constructors. It is very versatile, but runs slowly due to the layers it goes through. It can set the delimiter, handle fields wrapped in quotes, trim whitespace, and much more.
I have database with EmpNo(Int) and EmpImage(Image) columns.
I am using HttpHandler to display the Images.
I am storing Images both in database and folder.
Now I want to rename the images in the folder to match the EmpNo values, which I didn't do while uploading.
So I need to fetch the image names from the database, compare them with the image names in the folder, and rename them.
How can I fetch or extract the image names from the binary data that I get from the database using a generic handler?
I have attached the code In handler for reference.
using System;
using System.Web;
using System.Data;
using System.Data.SqlClient;
public class Lab_14___ImageFetchingHandler : IHttpHandler
{
    /// <summary>
    /// Streams the EmpImage blob for the requested employee id to the response.
    /// Note: raw image bytes carry no file name — to rename files on disk you
    /// must store/query the name (or use EmpNo itself) in a separate column;
    /// it cannot be extracted from the binary image data.
    /// </summary>
    public void ProcessRequest(HttpContext context)
    {
        // SQL Server parameters use '@', not '#' (the posted '#id' was a
        // transcription error and would fail as invalid SQL).
        const string query = "Select EmpImage from EmpImages where Empno=@id";

        // using blocks guarantee the connection, command and reader are
        // released even if the request fails part-way.
        using (SqlConnection vConn = new SqlConnection("server=localhost; database=Asp.netDemoWebsiteDatabase; Integrated Security = SSPI;"))
        using (SqlCommand vComm = new SqlCommand(query, vConn))
        {
            // Id of the employee whose image is requested, from the query string.
            String vId = context.Request.QueryString["id"];
            vComm.Parameters.AddWithValue("@id", vId);

            vConn.Open();
            using (SqlDataReader vDr = vComm.ExecuteReader())
            {
                while (vDr.Read())
                {
                    context.Response.ContentType = "image/jpg";
                    context.Response.BinaryWrite((byte[])vDr["EmpImage"]);
                }
            }
        }
    }

    public bool IsReusable
    {
        get
        {
            return false;
        }
    }
}
Here are different ways to inspect image metadata.
// Read the blob once and reuse it for each option.
// (The missing ';' in the original was a typo.)
Byte[] content = (Byte[])vDr["EmpImage"];

// Option 1: System.Drawing — walk the EXIF property items.
// NOTE(review): not every property value is UTF-8 text; many are binary and
// will decode to garbage. Check propertyItem.Type before decoding.
Image img = new Bitmap(new MemoryStream(content));
Encoding _Encoding = Encoding.UTF8;
var props = img.PropertyItems;
string propData;
foreach (var propertyItem in props)
{
    propData = _Encoding.GetString(propertyItem.Value);
    Debug.WriteLine("{0}[{1}]", propertyItem.Id, propData);
}

// Option 2: WPF imaging — requires references to PresentationCore and
// WindowsBase, plus 'using System.Windows.Media.Imaging'.
var imgFrame = BitmapFrame.Create(new MemoryStream(content));
var metadata = imgFrame.Metadata as BitmapMetadata;

// Option 3: requires the MetadataExtractor NuGet package.
var mr = ImageMetadataReader.ReadMetadata(new MemoryStream(content));
foreach (var directory in mr)
{
    foreach (var tag in directory.Tags)
    {
        Debug.WriteLine("{0} - {1} = {2}]", directory.Name, tag.Name, tag.Description);
    }
}