I'm working on a C# / ASP.NET flipbook project using pdf.js.
When I launch my application with an empty cache in Chrome, I load my PDF file once and everything is fine, but when I refresh the page without clearing the cache, the PDF is downloaded five times in its entirety, so the transfer size goes from 7.5 MB to 37.5 MB.
I checked with IE 11, Edge, and FireFox, and I don't have issues with them.
I have already tried to disable the cache following these instructions, but it didn't solve my problem.
This is where I call my file :
// Loads the PDF with pdf.js, creates one canvas per page, renders every
// page, then initializes the flipbook once the DOM is ready.
function loadBook() {
    // Run pdf.js on the main thread; workerSrc is still set so the worker
    // file can be found if worker mode is re-enabled later.
    PDFJS.disableWorker = true;
    PDFJS.workerSrc = '../../Scripts/pdf.worker.js';
    $(document).ready(function () {
        var file = "http://localhost:51790/pdf/newspaper.pdf";
        PDFJS.getDocument(file).then(function getPdf(pdf) {
            var pagesNumber = pdf.numPages;
            // Adding canvas to the html.
            // Build all page containers in one string and touch the DOM once:
            // the original called $('.flipbook').html($('.flipbook').html() + ...)
            // inside the loop, which re-parses the whole container on every
            // iteration (O(n^2)) and destroys any existing event handlers.
            var markup = '';
            for (var i = 1; i <= pagesNumber; i++) {
                markup += '<div><canvas id="page' + i + '"></canvas></div>';
            }
            $('.flipbook').append(markup);
            //Adding and rendering pages
            for (var i = 1; i <= pagesNumber; i++) {
                renderPage(i, document.getElementById('page' + i), pdf);
            }
            //Creating flipbook
            loadJournal();
        });
    })
}
And this is my function for the render :
// Renders one PDF page (1-based index `numero`) onto the given canvas
// element at scale 1, sizing the canvas to the page first.
function renderPage(numero, pageElement, pdf) {
    pdf.getPage(numero).then(function (page) {
        // Legacy pdf.js API: getViewport takes the scale value directly.
        var viewport = page.getViewport(1);
        // The canvas must match the viewport before drawing into it.
        pageElement.width = viewport.width;
        pageElement.height = viewport.height;
        page.render({
            canvasContext: pageElement.getContext('2d'),
            viewport: viewport
        });
    });
}
I'd like to understand why it reacts like this.
Related
Is there a way you can detect in C# if a browser is blocking external marketing
tracking information like fbq?
In jQuery you can use typeof fbq !== 'undefined' to detect whether the Facebook Pixel tracking code is blocked. For example:
// Fire the Facebook Pixel event only when fbq was actually loaded
// (i.e. not removed by an ad/tracking blocker).
var fbqAvailable = typeof fbq !== 'undefined';
if (fbqAvailable) {
    fbq('track', 'ViewContent', {
        value: FacebookPriceIncVat,
        currency: FacebookInitiateCheckoutCurrency,
        content_ids: FacebookSKUView,
        content_type: 'product'
    });
}
I have one module in which the Facebook Tracking code can only be generated on the back-end. Is there a way you can detect if fbq exist using perhaps ajax that changes a bool or some other way to detect if something is blocking links in your browser like a plugin of some sort?
The code below should not execute if fbq is not present.
// Emit the Facebook Pixel scripts only when a Pixel ID is configured;
// without an ID there is no pixel to initialize and fbq stays undefined.
if (FacebookPixelID != null)
{
// Advanced-matching user data passed to fbq('init') below.
// NOTE(review): these values are interpolated straight into the generated
// JavaScript without escaping — a value containing a quote would break the
// script; confirm inputs are sanitized upstream.
EndUserFirstName = SessionManager.CurrentUserInfo.FirstName;
EndUserLastName = SessionManager.CurrentUserInfo.LastName;
EndUserEmail = SessionManager.CurrentUserInfo.UserName;
FacebookInitializationCodeID = FacebookPixelID;
// Build the pixel bootstrap <script> block into a string.
string FacebookPixelResult = string.Empty;
using (var FBPxl = new StringWriter())
using (var FBCodescript = new HtmlTextWriter(FBPxl))
{
FBCodescript.AddAttribute(Attr.Type, "text/javascript");
FBCodescript.RenderBeginTag(Tag.Script);
//load ecommerce plugin
FBCodescript.WriteLine("!function(f,b,e,v,n,t,s){if (f.fbq) return; n = f.fbq = function(){n.callMethod?n.callMethod.apply(n, arguments):n.queue.push(arguments)};");
FBCodescript.WriteLine("if (!f._fbq) f._fbq = n; n.push = n; n.loaded = !0; n.version = '2.0';");
FBCodescript.WriteLine("n.queue =[]; t = b.createElement(e); t.async = !0;");
FBCodescript.WriteLine("t.src = v; s = b.getElementsByTagName(e)[0];");
FBCodescript.WriteLine("s.parentNode.insertBefore(t, s)}");
FBCodescript.WriteLine("(window, document,'script','https://connect.facebook.net/en_US/fbevents.js');");
// init with advanced matching (em/fn/ln), then record the page view.
FBCodescript.WriteLine($"fbq('init', '{FacebookInitializationCodeID}', {{ em: '{EndUserEmail}', fn: '{EndUserFirstName}', ln: '{EndUserLastName}',}});");
FBCodescript.WriteLine("fbq('track', 'PageView');");
FBCodescript.RenderEndTag();
// <noscript> fallback pixel for browsers with JavaScript disabled.
FBCodescript.WriteLine($"<noscript><img height='1' width='1' style='display:none' src='https://www.facebook.com/tr?id={FacebookInitializationCodeID}&ev=PageView&noscript=1'/></noscript>");
FacebookPixelResult = FBCodescript.InnerWriter.ToString();
}
Page.ClientScript.RegisterStartupScript(this.GetType(), "myFacebookPixelInitialization", FacebookPixelResult, false);
//Facebook Pixels Purchase Code
string FacebookPixelPurchase = string.Empty;
// Fall back to ZAR when the transaction carries no currency.
if (RevenueCurrency != null)
{
FBPurchaseCurrency = RevenueCurrency;
}
else
{
FBPurchaseCurrency = "ZAR";
}
using (var FBPxlPurchase = new StringWriter())
using (var FBCodescriptPurchase = new HtmlTextWriter(FBPxlPurchase))
{
FBCodescriptPurchase.AddAttribute(Attr.Type, "text/javascript");
FBCodescriptPurchase.RenderBeginTag(Tag.Script);
//Write Facebook Pixel Purchase Event code
FBCodescriptPurchase.WriteLine("fbq('track', 'Purchase', {");
FBCodescriptPurchase.WriteLine($"value: {trans.Revenue},");
FBCodescriptPurchase.WriteLine($"currency: '{FBPurchaseCurrency}',");
FBCodescriptPurchase.WriteLine($"content_ids: '{string.Join(",", trans.LineItems.Select(l => l.SKU).ToArray())}',");
FBCodescriptPurchase.WriteLine("content_type: 'product',");
FBCodescriptPurchase.WriteLine($"contents: [");
// Emit one JS object per line item; the counter inserts commas between
// array elements so the last element has no trailing comma.
var PurchaseCounter = 1;
foreach (var lineitem in trans.LineItems)
{
FBCodescriptPurchase.WriteLine("{");
FBCodescriptPurchase.WriteLine($"id: '{lineitem.SKU}',");
FBCodescriptPurchase.WriteLine($"quantity: {lineitem.Quantity},");
FBCodescriptPurchase.WriteLine($"item_price: {lineitem.UnitPrice}");
FBCodescriptPurchase.WriteLine("}");
if (PurchaseCounter != trans.LineItems.Count) FBCodescriptPurchase.WriteLine(",");
PurchaseCounter++;
}
FBCodescriptPurchase.WriteLine("]");
FBCodescriptPurchase.WriteLine("});");
FBCodescriptPurchase.RenderEndTag();
FacebookPixelPurchase = FBCodescriptPurchase.InnerWriter.ToString();
}
Page.ClientScript.RegisterStartupScript(this.GetType(), "myFacebookPixelPurchase", FacebookPixelPurchase, false);
}
}
I think it is actually easy to fix this.
Don't know why I did not think of this.
I can just add the if statement in the C# code.
I'm dynamically creating many images in a Web Forms application with data URI encoding (no HttpHandler). The images are loaded from a database and I'm filtering with Isotope and lazy loading. I'm using asynchronous tasks in the page load to fetch the data and load into the image controls.
I've created thumbnails and applied image compression as well. Regardless, page load is very slow. I have a white page until all content is loaded.
Am I handling the Async methods correctly? Shouldn't this allow the display of the other content on the page first?
Page Load Method
// Register LoadImages as an asynchronous page task; ASP.NET awaits it
// before the page renders (requires Async="true" in the @ Page directive).
RegisterAsyncTask(new PageAsyncTask(LoadImages));
Load Images method:
/// <summary>
/// Loads thumbnail rows from the database and creates one lazy-loaded
/// image control per row via <c>CreateDiv</c>.
/// </summary>
/// <returns>
/// A completed task. All work here is synchronous, so registering this as
/// a PageAsyncTask does NOT let the page render earlier — the lifecycle
/// still waits for it to finish before rendering.
/// </returns>
public Task LoadImages()
{
    //Get Data from DB
    DataTable dtImages = GetData();
    foreach (DataRow row in dtImages.Rows)
    {
        // Scope per-row values to the iteration (the original reused a
        // single outer `id` variable and built an unused `category` local).
        int id = Convert.ToInt32(row["FILE_ID"]);
        Byte[] bytes = (Byte[])row["THUMBNAIL_CONTENT"];
        //Method to create the content on the page:
        CreateDiv(id, bytes);
    }
    return Task.CompletedTask;
}
The method to create the HTML Generic Controls:
/// <summary>
/// Adds one lazy-loaded image control to the placeholder div. The visible
/// src is a spinner gif; the real thumbnail goes into data-src, which the
/// client-side lazy-load script swaps in when the image scrolls into view.
/// </summary>
/// <param name="imageID">Database FILE_ID; 0 means "no placeholder gif".</param>
/// <param name="fileBytes">Raw thumbnail bytes for the data URI.</param>
public void CreateDiv(int imageID, byte[] fileBytes)
{
    var image = new System.Web.UI.WebControls.Image();
    image.ID = "Image" + imageID;
    if (imageID != 0)
    {
        // Placeholder animated gif shown until the lazy loader runs
        // (original line ended with a stray empty statement `;`).
        image.ImageUrl = "images/loader.gif";
    }
    image.Attributes.Add("class", "img-fluid lazy");
    // The actual thumbnail, encoded as a base64 data URI.
    image.Attributes.Add("data-src", GetImage(fileBytes));
    image.Visible = true;
    PlaceHoldersDiv.Controls.Add(image);
}
Method to return the image URI:
/// <summary>
/// Converts raw image bytes into a base64 data URI suitable for an img src
/// or data-src attribute.
/// </summary>
/// <param name="img">The image content; must be a byte array.</param>
/// <returns>A "data:image/jpg;base64,..." URI string.</returns>
public string GetImage(object img)
{
    var base64 = Convert.ToBase64String((byte[])img);
    return $"data:image/jpg;base64,{base64}";
}
Lazy loading script:
// Throttled lazy loader: on scroll/resize/orientation change, swap data-src
// into src for every img.lazy that has entered the viewport, and detach the
// listeners once every image has been processed.
document.addEventListener("DOMContentLoaded", function () {
    // Copy the NodeList into a real array so processed images can be removed.
    // The original kept the static NodeList, which never shrinks, so the
    // "all done — remove listeners" branch below was unreachable.
    var lazyloadImages = Array.prototype.slice.call(document.querySelectorAll("img.lazy"));
    var lazyloadThrottleTimeout;
    function lazyload() {
        // Throttle: collapse bursts of scroll/resize events into one pass.
        if (lazyloadThrottleTimeout) {
            clearTimeout(lazyloadThrottleTimeout);
        }
        lazyloadThrottleTimeout = setTimeout(function () {
            var scrollTop = window.pageYOffset;
            lazyloadImages = lazyloadImages.filter(function (img) {
                if (img.offsetTop < (window.innerHeight + scrollTop)) {
                    img.src = img.dataset.src;
                    img.classList.remove('lazy');
                    return false; // processed — drop from the working set
                }
                return true; // still below the fold — keep for later
            });
            if (lazyloadImages.length == 0) {
                document.removeEventListener("scroll", lazyload);
                window.removeEventListener("resize", lazyload);
                // Event names are case-sensitive: the original used
                // "orientationChange", which never matches anything.
                window.removeEventListener("orientationchange", lazyload);
            }
        }, 20);
    }
    document.addEventListener("scroll", lazyload);
    window.addEventListener("resize", lazyload);
    window.addEventListener("orientationchange", lazyload);
});
FYI, I tried using an Http Handler to load the images but performance was much worse!
When I send a Blob to a controller via ajax, saving the file in the controller causes a page refresh. If I don't save the file, it doesn't refresh. My code:
Update I spun up a new blank ASP.NET MVC 5 app, copied over the ajax code, and it still refreshes the page if I save file in controller.
controller:
public class PicUploadTestsController : Controller
{
    // Serves the test page containing the file input and the ajax uploader.
    public ActionResult StackOverflowSample()
    {
        return View();
    }

    /// <summary>
    /// Receives a raw image blob in the request body, saves it to disk and
    /// returns the relative URL of the saved file.
    /// </summary>
    [HttpPost]
    public string UploadPic()
    {
        var length = Request.ContentLength;
        var bytes = new byte[length];
        // Stream.Read may return fewer bytes than requested; loop until the
        // whole body has been read (a single Read could truncate the upload).
        int offset = 0;
        while (offset < length)
        {
            int read = Request.InputStream.Read(bytes, offset, length - offset);
            if (read == 0) break; // client closed the connection early
            offset += read;
        }
        WebImage webImage = new WebImage(bytes);
        var path = Path.Combine(Globals_mt.Instance.ServerRootPath, "Areas/SampleTests/Img/test.jpg");
        //If I comment out the save, no reload happens
        webImage.Save(path);
        return "/Areas/SampleTests/Img/test.jpg";
    }
}
typescript:
$(function () {
    // On file selection: re-encode the image through a canvas, then POST the
    // resulting blob to the controller with a bare XHR (no form submit).
    $('#blob-upload').change((e) => {
        var input = <HTMLInputElement>e.target;
        var file = input.files[0];
        getBlob(file, (blob: Blob) => {
            var xhr = new XMLHttpRequest();
            xhr.open('post', '/SampleTests/PicUploadTests/UploadPic', true);
            xhr.send(blob);
        });
    });

    // Reads the file, draws it onto a canvas at its natural size and hands
    // the canvas content back to `callback` as a Blob of the same mime type.
    function getBlob(file: File, callback: Function) {
        var filereader = new FileReader();
        filereader.onloadend = () => {
            var imageEl = new Image();
            imageEl.onload = () => {
                var width = imageEl.width;
                var height = imageEl.height;
                var canvasEl: HTMLCanvasElement = document.createElement('canvas');
                var context: CanvasRenderingContext2D = canvasEl.getContext('2d');
                canvasEl.width = width;
                canvasEl.height = height;
                context.drawImage(imageEl, 0, 0, width, height);
                var blob: Blob = dataUrlToBlob(canvasEl.toDataURL(file.type));
                callback(blob);
            };
            var result = filereader.result;
            imageEl.src = result;
        };
        filereader.readAsDataURL(file);
    }

    // Converts a data: URL (base64 or plain percent-encoded) into a Blob.
    function dataUrlToBlob(dataURL) {
        var BASE64_MARKER = ';base64,';
        if (dataURL.indexOf(BASE64_MARKER) == -1) {
            // Non-base64 data URL: the payload is percent-encoded text, so
            // decode it before wrapping it in a Blob — the original passed
            // the still-encoded string through, corrupting the content.
            var parts = dataURL.split(',');
            var contentType = parts[0].split(':')[1];
            var raw = decodeURIComponent(parts[1]);
            return new Blob([raw], { type: contentType });
        }
        var parts = dataURL.split(BASE64_MARKER);
        var contentType = parts[0].split(':')[1];
        // atob yields a binary string; copy its char codes into a byte array.
        var raw = window.atob(parts[1]);
        var rawLength = raw.length;
        var uInt8Array = new Uint8Array(rawLength);
        for (var i = 0; i < rawLength; ++i) {
            uInt8Array[i] = raw.charCodeAt(i);
        }
        return new Blob([uInt8Array], { type: contentType });
    }
});
razor view:
@* Minimal upload form: the file input is driven entirely by the TypeScript
   change handler, so this form is never actually submitted.
   (The pasted snippet showed "#using" — a mangled Razor "@using".) *@
@using (Html.BeginForm())
{
    <label>ajax blob to FileReceiver()</label>
    <input type="file" id="blob-upload" />
}
This happens no matter what I've tried, including uploading the file directly, saving either the file or Blob using file.SaveAs(), wrapping the blob in FormData(), moving the save operation out of the controller to a class library, not having a form in the view, i.e. just having an <input type="file" /> directly on the page, etc etc.
I'm using ASP.NET MVC 5, Visual Studio 2015
Am I missing something simple here? I had this working properly once upon a time.
Note: The reason I'm uploading a blob is because in the real code, I'm sizing the image down to a thumbnail so that I don't have to upload a 6mb file to create a 75 x proportionalHeight image.
Turns out the issue was with Mads Kristenson's Browser Link extension for Visual Studio, I disabled that and the issue was resolved
You need to return false from your JavaScript so that the page doesn't refresh.
// Same upload handler, but returning false so jQuery calls preventDefault()
// and stopPropagation() on the change event.
$('#blob-upload').change((e) => {
    var fileInput = <HTMLInputElement>e.target;
    var selectedFile = fileInput.files[0];
    getBlob(selectedFile, (blob: Blob) => {
        var request = new XMLHttpRequest();
        request.open('post', '/SampleTests/PicUploadTests/UploadPic', true);
        request.send(blob);
    });
    return false;
});
I have a problem with displaying an image in my web app. It takes a photo from the database and should display it in the web app.
/// <summary>
/// Shows the photos for every checked camera checkbox: converts the stored
/// physical path from GridView2 into an application-relative URL and binds
/// it to the matching image, label and link controls.
/// (The pasted snippet showed '#"..."' — mangled C# verbatim strings,
/// restored here to @"...".)
/// </summary>
protected void btnShowPhoto_Click(object sender, EventArgs e)
{
    string adresURL = @"~/Content";
    string camPath = "";
    string[] tab = new string[10];
    // Parallel control arrays: index i ties checkbox i to image/label/link i
    // and to row i of GridView2.
    CheckBox[] _boxes = new CheckBox[] { this.CheckBox1, this.CheckBox2, this.CheckBox3, this.CheckBox4, this.CheckBox5, this.CheckBox6, this.CheckBox7, this.CheckBox8 };
    System.Web.UI.WebControls.Image[] _images = new System.Web.UI.WebControls.Image[] { this.Image1, this.Image2, this.Image3, this.Image4, this.Image5, this.Image6, this.Image7, this.Image8 };
    Label[] _labels = new Label[] { this.lblCameraName1, this.lblCameraName2, this.lblCameraName3, this.lblCameraName4, this.lblCameraName5, this.lblCameraName6, this.lblCameraName7, this.lblCameraName8 };
    System.Web.UI.HtmlControls.HtmlAnchor[] _linkscontrol = new System.Web.UI.HtmlControls.HtmlAnchor[] { this.imagelink1, this.imagelink2, this.imagelink3, this.imagelink4, this.imagelink5, this.imagelink6, this.imagelink7, this.imagelink8 };
    // Reset every slot first so previously shown photos disappear.
    for (int i = 0; i < 8; i++)
    {
        _images[i].Visible = false;
        _labels[i].Visible = false;
        _linkscontrol[i].HRef = "";
    }
    for (int i = 0; i < 8; i++)
    {
        if (_boxes[i].Checked)
        {
            camPath = null;
            tab = null;
            // Physical path from the grid, e.g. E:\Photo\StoredPhotos\2015...;
            // split on "StoredPhotos" and remap the tail under ~/Content.
            camPath = this.GridView2.Rows[i].Cells[0].Text;
            tab = camPath.Split(new string[] { "StoredPhotos" }, StringSplitOptions.None);
            //Virtual Path'a
            camPath = adresURL + tab[1].Replace(@"\", "/");
            _labels[i].Visible = true;
            _labels[i].Text = this.GridView2.Rows[i].Cells[1].Text;
            _linkscontrol[i].HRef = camPath;
            _images[i].ImageUrl = camPath;
            _images[i].Visible = true;
        }
        else
            _images[i].Visible = false;
    }
}
I probably have a problem with my virtual path. camPath (the virtual path) is built from E:\Photo\StoredPhotos\20151010\000003819619_201512021335_1_C1, and finally looks like: ~/20151010/000003819619_201512021335_1_C1
This path means nothing to a web browser:
~/20151010/000003819619_201512021335_1_C1
It doesn't know what to do with that ~ directory. That's a server-side concept, not a client-side concept. So your server-side code needs to resolve that to an actual path.
It could be as simple as just explicitly starting from the root of the server:
string adresURL = #"/Content";
So the resulting URL would start with /Content/..... and the browser would check for the image in that path.
But if the application isn't (or might not be) the root of the server domain then you'd need to either manually account for that or use a server-side helper of some sort. There are a variety of ways to go about that, for example:
_images[i].ImageUrl = System.Web.VirtualPathUtility.ToAbsolute(camPath);
The browser expect access to image via http protocol, if you want view the image you have 2 diffrerent way:
(simple) Create a virtual directory under iis that point to a phisical folder E:\Photo\StoredPhotos\ and called StoredPhotos, in _images[i].ImageUrl you may set the value /StoredPhotos/20151010/000003819619_201512021335_1_C1.jpg
(complex) Build a class that read the file on the disk and write it to the response (use IHttpHandler interface), add this handler to web.config and set _images[i].ImageUrl the value of 'NameOfHandler.aspx?20151010/000003819619_201512021335_1_C1
I have an MVC3 application that needs to generate large reports on a regular basis. The user can choose their criteria and launch the report. Right now I am opening a new tab/window with the javascript window.open() method. While the report is getting generated the user can not use the site. Everything waits till the report is generated.
The code for generating the report is:
// Builds one combined PDF for the given equipment item ids: for each id it
// renders the Crystal report to PDF, appends its pages to the output
// document, then (when present on disk) appends the item's cut-sheet PDF.
// Returns the merged PDF as a downloadable FileStreamResult.
// NOTE(review): this runs synchronously inside the request — nothing is
// sent to the browser until the entire merge has finished.
private FileStreamResult doSpecReport(List<int> idProjItems)
{
PdfDocument outputDocument = new PdfDocument(); // returning to the user
foreach(var id in idProjItems)
{
// First() throws InvalidOperationException if the id does not exist.
var item = _entities.ProjectEquipmentItems.First(f => f.idProjectEquipmentItem == id);
var cutsheetPath = item.CutSheet;
// Report parameter: the equipment item id, passed by name to Crystal.
Dictionary<string, string> dictionary = new Dictionary<string, string>();
dictionary.Add("p_idEquipmentItem", id.ToString());
var fs = GetReportHtml("NameOfReport", dictionary); // Returns FileStreamResult from crystal
var inputDocument1 = CompatiblePdfReader.Open(fs.FileStream); // add report to output doc
int count = inputDocument1.PageCount;
// Copy every page of the rendered report into the output document.
for(int idx = 0; idx < count; idx++)
{
PdfPage page = inputDocument1.Pages[idx];
outputDocument.AddPage(page);
}
// Append the cut-sheet PDF when the item has one and the file exists.
if (!string.IsNullOrEmpty(cutsheetPath))
{
cutsheetPath = Path.Combine(Server.MapPath("~/Files/CutSheetFiles/"), cutsheetPath);
if (File.Exists(cutsheetPath))
{
var inputDocument2 = CompatiblePdfReader.Open(cutsheetPath);//, PdfDocumentOpenMode.Import);
count = inputDocument2.PageCount;
for(int idx = 0; idx < count; idx++)
{
PdfPage page = inputDocument2.Pages[idx];
outputDocument.AddPage(page);
}
}
}
}
// Serialize the merged document into memory and rewind before returning;
// MVC's FileStreamResult disposes the stream after writing the response.
var ms = new MemoryStream();
outputDocument.Save(ms, false);
ms.Position = 0;
return new FileStreamResult(ms, "application/pdf")
{
FileDownloadName = "Report.pdf"
};
}
I am not sure if I am doing anything wrong, I don't understand why this process takes up all the browser's resources. Thanks for any help.
Update: One version of the code that calls doSpecReport. The code around the success doesn't work.
// POSTs the report criteria and tries to embed the response as a PDF.
// NOTE(review): per the question text, this handler does not work — the
// controller returns raw binary PDF, not base64, so the generated
// 'data:application/pdf;base64,' URI is invalid.
$.ajax({
url: url,
data: qdata,
type: "POST",
success: function (result) { // this doesn't actually work.
var obj = $('<object type="application/pdf" width="100%" height="100%" border="2"></object>');
obj.attr('data', 'data:application/pdf;base64,' + result);
$(".mask").hide();
$('#divContainer').append(obj);
}
});
You have to generate a temporary file location on the server side and return the location url. You cannot set HTML Document contents with binary PDF. Can you generate the report and store it in a temp location and pass its url link in the response. Assuming that doSpecReport generates a temp pdf file named mypdf.pdf,
Then in your success block, you can add it to the object data attribute so that the end object looks like this
<object width="400" height="500" type="application/pdf" data="/my_pdf.pdf?#zoom=85&scrollbar=0&toolbar=0&navpanes=0" id="pdf_content">
</object>