Bruce W.1
I wrote this .aspx link-checker page. It has a textbox where the user
enters a URL. The app then fetches the page at that URL, extracts all
of its links, and puts them in an ArrayList of strings. It then checks
each URL string to see whether it's good. Below is the method that's
supposed to do this. However, it throws the following error:
The underlying connection was closed: The remote name could not be
resolved.
Each URL string in the ArrayList looks like this:
href="http://www.whatever.com"
The error may be caused by a URL that's no good, and that's fine. But
this method should skip over bad URLs and go on to the next one, rather
than making the whole thing croak.
How can I gracefully recover from errors and move on to the next URL?
Thanks for your help.
Code:
protected void CheckLinks(ArrayList urlA)
{
    foreach (string s in urlA)
    {
        // Each entry looks like: href="http://www.whatever.com"
        // Strip the leading href=" (6 characters) and the trailing quote.
        string x = s.Substring(6);
        int n = x.Length;
        string v = x.Substring(0, n - 1);
        resultLiteral.Text += "Checking link: " + v + "<br />";

        string result;
        WebResponse objResponse;
        StreamReader sr;
        WebRequest objRequest = System.Net.HttpWebRequest.Create(v);
        try
        {
            objResponse = objRequest.GetResponse();
            sr = new StreamReader(objResponse.GetResponseStream());
            result = sr.ReadToEnd();
            sr.Close();
        }
        catch (Exception e)
        {
            resultLiteral.Text += "Failure: " + e.ToString();
            result = "";
            Trace.Warn(e.ToString());
        }

        if (!result.Equals(""))
        {
            resultLiteral.Text += "Successful<br />";
        }
        else
        {
            resultLiteral.Text += "Failure<br />";
        }
    }
}
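
For reference, here's the direction I'm thinking of trying, in case it helps frame the question. It's only a rough sketch and I'm not sure it's the right fix: the idea is to move the WebRequest.Create call inside the try (Create itself can throw on malformed URIs such as relative links or mailto: entries, and in my version it sits outside the exception handling) and to dispose the response and reader so each iteration cleans up after itself. It assumes the same resultLiteral control as above.

protected void CheckLinks(ArrayList urlA)
{
    foreach (string s in urlA)
    {
        // Strip the href=" wrapper as before.
        string v = s.Substring(6, s.Length - 7);
        resultLiteral.Text += "Checking link: " + v + "<br />";
        try
        {
            // Create can throw (e.g. UriFormatException) on bad input,
            // so it belongs inside the try along with the request itself.
            WebRequest objRequest = WebRequest.Create(v);
            using (WebResponse objResponse = objRequest.GetResponse())
            using (StreamReader sr = new StreamReader(objResponse.GetResponseStream()))
            {
                sr.ReadToEnd();
            }
            resultLiteral.Text += "Successful<br />";
        }
        catch (Exception e)
        {
            // Log the failure and move on to the next URL instead of
            // letting the exception bubble up and kill the whole loop.
            Trace.Warn(e.ToString());
            resultLiteral.Text += "Failure<br />";
        }
    }
}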