C# JSON POST request with redirect

I am trying to make a login POST request with JSON to this website (Link) and follow the redirect. My current program works fine if the login details are wrong: in that case I get the '(401) Unauthorized.' message, which means the POST request itself was successful.
However, my problem is that if the login details are correct, I get '(400) Bad Request'. I have no idea why this happens and I am currently stuck at this point.
Here is my code, and I hope someone can help me out:
static string url = "https://auth.riotgames.com/authz/auth";
static string uriString = "";
static void Main(string[] args)
{
var request_check = (HttpWebRequest)HttpWebRequest.Create("https://auth.riotgames.com/authz/auth");
request_check.Host = "auth.riotgames.com";
request_check.UserAgent = "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:47.0) Gecko/20100101 Firefox/47.0";
request_check.Accept = "application/json, text/javascript, */*; q=0.01";
request_check.Headers.Add("Accept-Language", "en-US,en;q=0.5");
request_check.Headers.Add("Accept-Encoding", "gzip, deflate, br");
request_check.ContentType = "application/json";
request_check.Headers.Add("X-Requested-With", "XMLHttpRequest");
request_check.Referer = "https://auth.riotgames.com/authorize?response_type=code&scope=openid%20email&client_id=merch-store-client&ui_locales=de-DE&login_hint=euw&redirect_uri=https://euw.merch.riotgames.com/de/riot_sso/auth/redirect/";
var cookieContainer = new CookieContainer();
request_check.CookieContainer = cookieContainer;
request_check.Method = "POST";
request_check.KeepAlive = true;
request_check.AllowAutoRedirect = false;
// Account details Senturia:a12365478
using (var streamWriter = new StreamWriter(request_check.GetRequestStream()))
{
string json = "{\"username\":\"Senturia\",\"password\":\"a12365478\",\"remember\":false,\"region\":\"EUW1\",\"language\":\"de_DE\",\"lang\":\"de_DE\"}";
streamWriter.Write(json);
}
try
{
// Get the response ...
using (var webResponse = (HttpWebResponse)request_check.GetResponse())
{
// Now look to see if it's a redirect
if ((int)webResponse.StatusCode >= 300 && (int)webResponse.StatusCode <= 399)
{
uriString = webResponse.Headers["Location"];
Console.WriteLine("Redirect to " + uriString ?? "NULL");
}
}
}
catch(Exception e)
{
Console.WriteLine(e.Message);
}
Console.ReadKey();
}

When an HTTP request fails you can catch a WebException, and read the response from the server as it might contain useful information about the reason why the request failed:
catch (WebException e)
{
using (var stream = e.Response.GetResponseStream())
using (var reader = new StreamReader(stream))
{
Console.WriteLine(reader.ReadToEnd());
}
}
In your case this prints:
{"error":"invalid_session_id","error_description":"Missing session id."}
So I guess that the server requires some session id parameter to be sent along with the request. Consult the documentation of the endpoint you are trying to invoke for more details on how to do that.
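If it really is a missing session cookie, one thing worth trying is to load the authorize page (the URL from the Referer header) first and reuse the same CookieContainer for the JSON POST. This is an untested sketch, and the assumption that loading that page establishes the session is mine, not something the error message confirms:
var cookies = new CookieContainer();

// Assumed step: load the authorize page so any session cookie it sets lands in the container
var bootstrap = (HttpWebRequest)WebRequest.Create(
    "https://auth.riotgames.com/authorize?response_type=code&scope=openid%20email&client_id=merch-store-client&ui_locales=de-DE&login_hint=euw&redirect_uri=https://euw.merch.riotgames.com/de/riot_sso/auth/redirect/");
bootstrap.CookieContainer = cookies;
using (bootstrap.GetResponse()) { }

// Then send the JSON login with the same CookieContainer
var login = (HttpWebRequest)WebRequest.Create("https://auth.riotgames.com/authz/auth");
login.CookieContainer = cookies;
login.Method = "POST";
login.ContentType = "application/json";
login.AllowAutoRedirect = false;
using (var writer = new StreamWriter(login.GetRequestStream()))
{
    writer.Write("{\"username\":\"...\",\"password\":\"...\",\"remember\":false}");
}
using (var response = (HttpWebResponse)login.GetResponse())
{
    Console.WriteLine((int)response.StatusCode + " " + (response.Headers["Location"] ?? "no redirect"));
}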

Related

In-code session id and AllowAutoRedirect not working; getting 302

I'm trying to scrape data from the website in my code below. The site requires that I set a session id to proceed to the second page. I'm trying to extract the session id from the first page and add it as a cookie in the request for the second page, but this always returns a 302 error. However, if I use a web browser, extract the session id via the developer window and hard-code it into the second-page request, it always works. I've used Fiddler, which generated the code below, but still no luck in solving this.
private static void Main()
{
try
{
HttpWebResponse response;
string sessionId = "";
if (Request_flow_gassco_no(out response))
{
StreamReader sReade1 = new StreamReader(response.GetResponseStream());
string HTM1 = sReade1.ReadToEnd();
sessionId = response.Headers["Set-Cookie"];
response.Close();
}
sessionId = sessionId.Split('=').GetValue(1).ToString().Trim().Split(';').GetValue(0).ToString().Trim();
//s = "4AEEFECB6A59102D0C2F4AC2DBA4362D";
if (Request_flow_gassco_no_disclaimer(out response, sessionId))
{
StreamReader sReade1 = new StreamReader(response.GetResponseStream());
string HTM1 = sReade1.ReadToEnd();
response.Close();
}
}
catch (Exception ex)
{
// report any failure from the two requests above
Console.WriteLine(ex.Message);
}
}
private static bool Request_flow_gassco_no(out HttpWebResponse response)
{
response = null;
try
{
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://flow.gassco.no/disclaimer");
request.Headers.Add("Upgrade-Insecure-Requests", #"1");
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36";
request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8";
request.Headers.Set(HttpRequestHeader.AcceptEncoding, "gzip, deflate");
request.Headers.Set(HttpRequestHeader.AcceptLanguage, "en-GB,en-US;q=0.9,en;q=0.8");
response = (HttpWebResponse)request.GetResponse();
}
catch (WebException e)
{
if (e.Status == WebExceptionStatus.ProtocolError) response = (HttpWebResponse)e.Response;
else return false;
}
catch (Exception)
{
if (response != null) response.Close();
return false;
}
return true;
}
private static bool Request_flow_gassco_no_disclaimer(out HttpWebResponse response, string session)
{
response = null;
try
{
Uri target = new Uri("http://flow.gassco.no/");
var cookieContainer = new CookieContainer();
var cookies = new Cookie("JSESSIONID", session) { Domain = target.Host };
cookieContainer.Add(cookies);
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("http://flow.gassco.no/disclaimer/acceptDisclaimer?");
request.CookieContainer = cookieContainer;
request.AllowAutoRedirect = true;
request.Headers.Add("Upgrade-Insecure-Requests", #"1");
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36";
request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8";
request.Referer = "http://flow.gassco.no/disclaimer";
request.Headers.Set(HttpRequestHeader.AcceptEncoding, "gzip, deflate");
request.Headers.Set(HttpRequestHeader.AcceptLanguage, "en-GB,en-US;q=0.9,en;q=0.8");
response = (HttpWebResponse)request.GetResponse();
}
catch (WebException e)
{
if (e.Status == WebExceptionStatus.ProtocolError) response = (HttpWebResponse)e.Response;
else return false;
}
catch (Exception)
{
if (response != null) response.Close();
return false;
}
return true;
}
Is there a difference between a session id returned in a web browser and one returned in an HttpWebResponse?
Found the answer to this. The steps to get it working were as follows:
1. Make the initial request and extract the cookie from the response. I used a cookie container for this.
2. Make a request to the acceptDisclaimer page. Assign the cookie container from the first request and also set the request to NOT allow auto redirect.
3. Make a request to the root URL again, assigning the same cookie container. The response returns the expected HTML.
I hope this helps someone in the future.
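A rough, untested sketch of those three steps, with the URLs taken from the question and a single CookieContainer shared by every request so the JSESSIONID from the first response travels along automatically:
var cookies = new CookieContainer();

// Step 1: initial request; the session cookie is captured by the container
var first = (HttpWebRequest)WebRequest.Create("http://flow.gassco.no/disclaimer");
first.CookieContainer = cookies;
using (first.GetResponse()) { }

// Step 2: accept the disclaimer with the same container, and do NOT follow the redirect
var accept = (HttpWebRequest)WebRequest.Create("http://flow.gassco.no/disclaimer/acceptDisclaimer");
accept.CookieContainer = cookies;
accept.AllowAutoRedirect = false;
using (accept.GetResponse()) { }

// Step 3: request the root URL again; the response should now contain the expected HTML
var data = (HttpWebRequest)WebRequest.Create("http://flow.gassco.no/");
data.CookieContainer = cookies;
using (var response = data.GetResponse())
using (var reader = new StreamReader(response.GetResponseStream()))
{
    string html = reader.ReadToEnd();
}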
What is the value of 's' after this line is executed?
s="JSESSIONID="+s.Split('=').GetValue(1).ToString().Trim().Split(';').GetValue(0).ToString().Trim();
From the looks of it, I think it will be "JSESSIONID=__utma", which is not what you want.
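If the chained Split keeps picking up the wrong cookie (such as __utma), a small helper that looks specifically for JSESSIONID in the Set-Cookie header is safer. A minimal sketch (the helper name is my own, for illustration):
static string ExtractJSessionId(string setCookieHeader)
{
    if (string.IsNullOrEmpty(setCookieHeader)) return null;
    // Several cookies can be folded into one Set-Cookie header value, separated by commas
    foreach (string part in setCookieHeader.Split(','))
    {
        string nameValue = part.Split(';')[0].Trim();   // "NAME=value" before the attributes
        if (nameValue.StartsWith("JSESSIONID=", StringComparison.OrdinalIgnoreCase))
            return nameValue.Substring("JSESSIONID=".Length);
    }
    return null;
}
The call sessionId = ExtractJSessionId(response.Headers["Set-Cookie"]); would then replace the chained Split calls.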

C# HTTP GET request to http://sede.educacion.gob.es/

I am trying to perform a GET request to https://sede.educacion.gob.es/publiventa/catalogo.action?cod=E; with the cod=E parameter, in the browser, the web site opens a menu below "Materias de educación", but when I perform the request using C# this menu is not loaded, and I need it. This is the code I am using (readHtml) to read the HTML as a string and later parse it with HtmlAgilityPack.
private string readHtml(string urlAddress)
{
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(urlAddress);
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:56.0) Gecko/20100101 Firefox/56.0";
request.Accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
request.AutomaticDecompression = DecompressionMethods.GZip;
HttpWebResponse response = (HttpWebResponse)request.GetResponse();
if (response.StatusCode == HttpStatusCode.OK)
{
Stream receiveStream = response.GetResponseStream();
StreamReader readStream = null;
if (response.CharacterSet == null)
{
readStream = new StreamReader(receiveStream);
}
else
{
readStream = new StreamReader(receiveStream, Encoding.GetEncoding(response.CharacterSet));
}
string data = readStream.ReadToEnd();
response.Close();
readStream.Close();
return data;
}
return null;
}
The Uri you posted (https://sede.educacion.gob.es/publiventa/catalogo.action?cod=E) uses a Javascript switch to show the menu content.
When you connect to that Uri (without clicking a menu link), the site shows three different versions of that page:
1) Page with closed menu and proposed new editions
2) Page with closed menu and search engine fields
3) Page with open menu and a selection of the menu content
This switch is based on an internal procedure which records the current session. Unless you click on a menu link (which is connected to an event listener), the Javascript procedure shows the page in these different states.
I gave it a look; those scripts are quite long (a whole multi-purpose library) and I had no time to parse them all (maybe you can do that) to find out what parameters the event listener is passing.
But the three-state version switch is constant.
What I mean is that you can call that page three times, preserving the CookieContainer: the third time you connect to it, it will stream the whole menu content and its links.
If you request the same page three times, the third time the HTML page will contain all the "Materias de educación" links:
public async void SomeMethodAsync()
{
string HtmlPage = await GetHttpStream([URI]);
HtmlPage = await GetHttpStream([URI]);
HtmlPage = await GetHttpStream([URI]);
}
This is, more or less, what I used to get that page:
CookieContainer CookieJar = new CookieContainer();
public async Task<string> GetHttpStream(Uri HtmlPage)
{
HttpWebRequest httpRequest;
string Payload = string.Empty;
httpRequest = WebRequest.CreateHttp(HtmlPage);
try
{
httpRequest.CookieContainer = CookieJar;
httpRequest.KeepAlive = true;
httpRequest.ConnectionGroupName = Guid.NewGuid().ToString();
httpRequest.AllowAutoRedirect = true;
httpRequest.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
httpRequest.ServicePoint.MaxIdleTime = 30000;
httpRequest.ServicePoint.Expect100Continue = false;
httpRequest.UserAgent = "Mozilla/5.0 (Windows NT 10; Win64; x64; rv:56.0) Gecko/20100101 Firefox/56.0";
httpRequest.Accept = "ext/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
httpRequest.Headers.Add(HttpRequestHeader.AcceptLanguage, "es-ES,es;q=0.8,en-US;q=0.5,en;q=0.3");
httpRequest.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip, deflate;q=0.8");
httpRequest.Headers.Add(HttpRequestHeader.CacheControl, "no-cache");
using (HttpWebResponse httpResponse = (HttpWebResponse)await httpRequest.GetResponseAsync())
{
Stream ResponseStream = httpResponse.GetResponseStream();
if (httpResponse.StatusCode == HttpStatusCode.OK)
{
try
{
//ResponseStream.Position = 0;
Encoding encoding = Encoding.GetEncoding(httpResponse.CharacterSet);
using (MemoryStream _memStream = new MemoryStream())
{
if (httpResponse.ContentEncoding.Contains("gzip"))
{
using (GZipStream _gzipStream = new GZipStream(ResponseStream, System.IO.Compression.CompressionMode.Decompress))
{
_gzipStream.CopyTo(_memStream);
};
}
else if (httpResponse.ContentEncoding.Contains("deflate"))
{
using (DeflateStream _deflStream = new DeflateStream(ResponseStream, System.IO.Compression.CompressionMode.Decompress))
{
_deflStream.CopyTo(_memStream);
};
}
else
{
ResponseStream.CopyTo(_memStream);
}
_memStream.Position = 0;
using (StreamReader _reader = new StreamReader(_memStream, encoding))
{
Payload = _reader.ReadToEnd().Trim();
};
};
}
catch (Exception)
{
Payload = string.Empty;
}
}
}
}
catch (WebException exW)
{
if (exW.Response != null)
{
//Handle WebException
}
}
catch (System.Exception exS)
{
//Handle System.Exception
}
CookieJar = httpRequest.CookieContainer;
return Payload;
}

Getting data from Google Play with C# HttpWebRequest and its response is unlike the browser

I am trying to get data from a Google Play web page with C# HttpWebRequest, but the response I get is different from what the browser shows.
Code:
public const string googlePlayUrl = "https://play.google.com/store/apps/details?id=";
public void GetData(string packageName) {
HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(new Uri(googlePlayUrl + packageName));
request.Method = WebRequestMethods.Http.Get;
request.ContentType = "text/html";
request.UserAgent = "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:19.0) Gecko/20100101 Firefox/19.0";
request.AutomaticDecompression = DecompressionMethods.GZip;
request.BeginGetResponse((IAsyncResult asynchronousResult) =>
{
HttpWebRequest requested = (HttpWebRequest)asynchronousResult.AsyncState;
using (HttpWebResponse response = (HttpWebResponse)requested.EndGetResponse(asynchronousResult))
{
System.IO.Stream responseStream = response.GetResponseStream();
using (StreamReader reader = new StreamReader(responseStream))
{
Console.WriteLine(reader.ReadToEnd());
}
responseStream.Close();
}
}, request);
}
The request connection is fine and I get a response, but it's different from what I see when I access the web page with a browser. It has none of the elements that I want to use, such as:
div.id-app-title
span attr[itemprop="genre"]
div attr[itemprop="description"]
Not sure why; I've tried to set the user-agent but it still doesn't work, or maybe I set it wrong.
I hope someone has a solution for this :)
Assuming your public IP address has not been blocked by Google, you can use the synchronous method request.GetResponse() together with the Parallel.ForEach() as shown below:
public static string GetDataSync(string packageName)
{
string result = "";
Uri uri = new Uri(googlePlayUrl + packageName);
var request = HttpWebRequest.Create(uri);
var response = request.GetResponse();
var responseStream = response.GetResponseStream();
using (StreamReader reader = new StreamReader(responseStream))
{
result = (reader.ReadToEnd());
}
responseStream.Close();
return result;
}
Call the method above using Parallel.ForEach and a thread-safe collection, ConcurrentDictionary, to store the HTML string result per package:
IEnumerable<string> appPackages = new List<string>() {
"com.google.android.apps.youtube.music",
"com.netflix.mediaclient"
};
ConcurrentDictionary<string, string> results =
new ConcurrentDictionary<string, string>(Environment.ProcessorCount, appPackages.Count());
Parallel.ForEach(appPackages, (app) =>
{
results.TryAdd(app, GetDataSync(app));
});

C# WebClient login to accounts.google.com

I am having a very difficult time trying to authenticate to accounts.google.com using WebClient.
I'm using the C# WebClient object to achieve the following.
I'm submitting form fields to https://accounts.google.com/ServiceLoginAuth?service=oz
Here are the POST fields:
service=oz
dsh=-8355435623354577691
GALX=33xq1Ma_CKI
timeStmp=
secTok=
Email=test#test.xom
Passwd=password
signIn=Sign in
PersistentCookie=yes
rmShown=1
Now, when the login page loads (before I submit data), it has the following headers:
Content-Type text/html; charset=UTF-8
Strict-Transport-Security max-age=2592000; includeSubDomains
Set-Cookie GAPS=1:QClFh_dKle5DhcdGwmU3m6FiPqPoqw:SqdLB2u4P2oGjt_x;Path=/;Expires=Sat, 21-Dec-2013 07:31:40 GMT;Secure;HttpOnly
Cache-Control no-cache, no-store
Pragma no-cache
Expires Mon, 01-Jan-1990 00:00:00 GMT
X-Frame-Options Deny
X-Auto-Login realm=com.google&args=service%3Doz%26continue%3Dhttps%253A%252F%252Faccounts.google.com%252FManageAccount
Content-Encoding gzip
Transfer-Encoding chunked
Date Thu, 22 Dec 2011 07:31:40 GMT
X-Content-Type-Options nosniff
X-XSS-Protection 1; mode=block
Server GSE
OK, now how do I use the WebClient class to include those headers?
I have tried webClient_.Headers.Add(), but it has limited effect and always returns the login page.
Below is the class that I use. I would appreciate any help.
Getting the login page:
public void LoginPageRequest(Account acc)
{
var rparams = new RequestParams();
rparams.URL = #"https://accounts.google.com/ServiceLoginAuth?service=oz";
rparams.RequestName = "LoginPage";
rparams.Account = acc;
webClient_.DownloadDataAsync(new Uri(rparams.URL), rparams);
}
void webClient__DownloadDataCompleted(object sender, DownloadDataCompletedEventArgs e)
{
RequestParams rparams = (RequestParams)e.UserState;
if (rparams.RequestName == "LoginPage")
{
ParseLoginRequest(e.Result, e.UserState);
}
}
Now I get the form fields using HtmlAgilityPack and add them to the parameters collection:
public void ParseLoginRequest(byte[] data, object UserState)
{
RequestParams rparams = (RequestParams)UserState;
rparams.ClearParams();
ASCIIEncoding encoder = new ASCIIEncoding();
string html = encoder.GetString(data);
HtmlNode.ElementsFlags.Remove("form");
HtmlDocument doc = new HtmlDocument();
doc.LoadHtml(html);
HtmlNode form = doc.GetElementbyId("gaia_loginform");
rparams.URL = form.GetAttributeValue("action", string.Empty);
rparams.RequestName = "LoginPost";
var inputs = form.Descendants("input");
foreach (var element in inputs)
{
string name = element.GetAttributeValue("name", "undefined");
string value = element.GetAttributeValue("value", "");
if (!name.Equals("undefined")) {
if (name.ToLower().Equals("email"))
{
value = rparams.Account.Email;
}
else if (name.ToLower().Equals("passwd"))
{
value = rparams.Account.Password;
}
rparams.AddParam(name,value);
Console.WriteLine(name + "-" + value);
}
}
webClient_.UploadValuesAsync(new Uri(rparams.URL), "POST", rparams.GetParams, rparams);
}
After I post the data I get the login page rather than a redirect or a success message.
What am I doing wrong?
After some fiddling around, it looks like the WebClient class is not the best approach to this particular problem.
To achieve the goal I had to drop one level down, to WebRequest.
When making a WebRequest (HttpWebRequest) and using HttpWebResponse, it is possible to set a CookieContainer:
webRequest_ = (HttpWebRequest)HttpWebRequest.Create(rparams.URL);
webRequest_.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)";
CookieContainer cookieJar = new CookieContainer();
webRequest_.CookieContainer = cookieJar;
string html = string.Empty;
try
{
using (WebResponse response = webRequest_.GetResponse())
{
using (var streamReader = new StreamReader(response.GetResponseStream()))
{
html = streamReader.ReadToEnd();
ParseLoginRequest(html, response,cookieJar);
}
}
}
catch (WebException e)
{
using (WebResponse response = e.Response)
{
HttpWebResponse httpResponse = (HttpWebResponse)response;
Console.WriteLine("Error code: {0}", httpResponse.StatusCode);
using (var streamReader = new StreamReader(response.GetResponseStream()))
Console.WriteLine(html = streamReader.ReadToEnd());
}
}
and then, when making the POST, use the same CookieContainer in the following manner:
webRequest_ = (HttpWebRequest)HttpWebRequest.Create(rparams.URL);
webRequest_.UserAgent = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)";
webRequest_.Method = "POST";
webRequest_.ContentType = "application/x-www-form-urlencoded";
webRequest_.CookieContainer = cookieJar;
var parameters = new StringBuilder();
foreach (var key in rparams.Params)
{
parameters.AppendFormat("{0}={1}&",HttpUtility.UrlEncode(key.ToString()),
HttpUtility.UrlEncode(rparams.Params[key.ToString()]));
}
parameters.Length -= 1;
using (var writer = new StreamWriter(webRequest_.GetRequestStream()))
{
writer.Write(parameters.ToString());
}
string html = string.Empty;
using (var response = webRequest_.GetResponse())
{
using (var streamReader = new StreamReader(response.GetResponseStream()))
{
html = streamReader.ReadToEnd();
}
}
So this works. This code is not for production use and can/should be optimized; treat it just as an example.
This is a quick example written in the answer pane and untested. You will probably need to parse some values out of an initial request for some form values to go into formData (see the sketch after the code below). A lot of my code is based on this type of process, unless we need to scrape Facebook/Spokeo-type sites, in which case the AJAX makes us use a different approach.
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Linq;
using System.Net;  // required for WebClient, CookieContainer and WebRequest
using System.Text;
namespace GMailTest
{
class Program
{
private static NameValueCollection formData = new NameValueCollection();
private static CookieAwareWebClient webClient = new CookieAwareWebClient();
static void Main(string[] args)
{
formData.Clear();
formData["service"] = "oz";
formData["dsh"] = "-8355435623354577691";
formData["GALX"] = "33xq1Ma_CKI";
formData["timeStmp"] = "";
formData["secTok"] = "";
formData["Email"] = "test#test.xom";
formData["Passwd"] = "password";
formData["signIn"] = "Sign in";
formData["PersistentCookie"] = "yes";
formData["rmShown"] = "1";
byte[] responseBytes = webClient.UploadValues("https://accounts.google.com/ServiceLoginAuth?service=oz", "POST", formData);
string responseHTML = Encoding.UTF8.GetString(responseBytes);
}
}
public class CookieAwareWebClient : WebClient
{
public CookieAwareWebClient() : this(new CookieContainer())
{ }
public CookieAwareWebClient(CookieContainer c)
{
this.CookieContainer = c;
this.Headers.Add("User-Agent: Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.52 Safari/536.5");
}
public CookieContainer CookieContainer { get; set; }
protected override WebRequest GetWebRequest(Uri address)
{
WebRequest request = base.GetWebRequest(address);
if (request is HttpWebRequest)
{
(request as HttpWebRequest).CookieContainer = this.CookieContainer;
}
return request;
}
}
}
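As mentioned above, values such as GALX and dsh are page-specific, so the "parse some values out of an initial request" step might look roughly like this. This is an untested sketch: the regexes are illustrative assumptions about the markup, and it needs using System.Text.RegularExpressions;:
// Download the login page with the same cookie-aware client so its cookies are kept
string loginPage = webClient.DownloadString("https://accounts.google.com/ServiceLoginAuth?service=oz");

// Pull the hidden form values out of the HTML (adjust the patterns to the real markup)
Match galx = Regex.Match(loginPage, "name=\"GALX\"\\s+value=\"([^\"]+)\"");
Match dsh = Regex.Match(loginPage, "name=\"dsh\"\\s+value=\"([^\"]+)\"");
if (galx.Success) formData["GALX"] = galx.Groups[1].Value;
if (dsh.Success) formData["dsh"] = dsh.Groups[1].Value;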

How to log in to HTTPS sites with the help of WebRequest and WebResponse

How do I log in to HTTPS sites with the help of WebRequest and WebResponse in C#?
Here is the code:
public string postFormData(Uri formActionUrl, string postData)
{
gRequest = (HttpWebRequest)WebRequest.Create(formActionUrl);
gRequest.UserAgent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.4) Gecko/2008102920 Firefox/3.0.4";
gRequest.CookieContainer = new CookieContainer();
gRequest.Method = "POST";
gRequest.Accept = " text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8, */*";
gRequest.KeepAlive = true;
gRequest.ContentType = #"text/html; charset=iso-8859-1";
#region CookieManagement
if (this.gCookies != null && this.gCookies.Count > 0)
{
gRequest.CookieContainer.Add(gCookies);
}
//logic to postdata to the form
string postdata = string.Format(postData);
byte[] postBuffer = System.Text.Encoding.GetEncoding(1252).GetBytes(postData);
gRequest.ContentLength = postBuffer.Length;
Stream postDataStream = gRequest.GetRequestStream();
postDataStream.Write(postBuffer, 0, postBuffer.Length);
postDataStream.Close();
//post data logic ends
//Get Response for this request url
gResponse = (HttpWebResponse)gRequest.GetResponse();
//check if the status code is http 200 or http ok
if (gResponse.StatusCode == HttpStatusCode.OK)
{
//get all the cookies from the current request and add them to the response object cookies
gResponse.Cookies = gRequest.CookieContainer.GetCookies(gRequest.RequestUri);
//check if response object has any cookies or not
if (gResponse.Cookies.Count > 0)
{
//check if this is the first request/response, if this is the response of first request gCookies
//will be null
if (this.gCookies == null)
{
gCookies = gResponse.Cookies;
}
else
{
foreach (Cookie oRespCookie in gResponse.Cookies)
{
bool bMatch = false;
foreach (Cookie oReqCookie in this.gCookies)
{
if (oReqCookie.Name == oRespCookie.Name)
{
oReqCookie.Value = oRespCookie.Value; // copy the new value from the response cookie
bMatch = true;
break; //
}
}
if (!bMatch)
this.gCookies.Add(oRespCookie);
}
}
}
#endregion
StreamReader reader = new StreamReader(gResponse.GetResponseStream());
string responseString = reader.ReadToEnd();
reader.Close();
//Console.Write("Response String:" + responseString);
return responseString;
}
else
{
return "Error in posting data";
}
}
// calling the above function
httphelper.postFormData(new Uri("https://login.yahoo.com/config/login?.done=http://answers.yahoo.com%2f&.src=knowsrch&.intl=us"), ".tries=1&.src=knowsrch&.md5=&.hash=&.js=&.last=&promo=&.intl=us&.bypass=&.partner=&.u=0b440p15q1nmb&.v=0&.challenge=Rt_fM1duQiNDnI5SrzAY_GETpNTL&.yplus=&.emailCode=&pkg=&stepid=&.ev=&hasMsgr=0&.chkP=Y&.done=http%3A%2F%2Fanswers.yahoo.com%2F&.pd=knowsrch_ver%3D0%26c%3D%26ivt%3D%26sg%3D&login=xyz&passwd=xyz&.save=Sign+In");
You need to see how authentication works for the site you are working with.
This may be through cookies, special headers, a hidden field, or something else.
Fire up a tool like Fiddler and see what the network traffic looks like when logging in, and how it differs from when you are not logged in.
Then recreate this logic with WebRequest and WebResponse.
See the answers to this SO question (HttpRequest: pass through AuthLogin).
What for? WatiN is good for testing and such, and it's easy to do basic screen scraping with it. Why reinvent the wheel if you don't have to?
You can set the WebRequest.Credentials property. For an example and documentation, see:
http://msdn.microsoft.com/en-us/library/system.net.networkcredential.aspx
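For completeness, a minimal sketch of the Credentials property mentioned above. Note that this only helps when the site uses HTTP authentication (Basic, Digest, NTLM) rather than an HTML login form, and the URL here is just a placeholder:
HttpWebRequest request = (HttpWebRequest)WebRequest.Create("https://example.com/protected");
request.Credentials = new NetworkCredential("username", "password");
using (var response = (HttpWebResponse)request.GetResponse())
using (var reader = new StreamReader(response.GetResponseStream()))
{
    Console.WriteLine(reader.ReadToEnd());
}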
