The role of a sitemap is to get search engines to include as many of a site's pages as possible, as soon as possible.
Here we first need to understand a basic principle of how search engines crawl. The whole Internet is like a criss-crossing net: each node is a web page, and pages are connected to one another by URLs. A spider can start from one page, follow a URL on it to another page, then follow URLs on that page to reach still more pages, and so on. But if a site has just been published, there may be no other URLs pointing to it yet, so it will never be "crawled" (included). To solve this problem, a new site can take the initiative and submit its URL to the search engine, requesting that the spider come to crawl it (Google provides a page for submitting URLs), but such a submission generally covers only the homepage URL.
For all URLs (especially dynamically generated ones) to be retrieved by spiders quickly and conveniently, we need to provide a complete, clear, and up-to-date sitemap.
Like the robots.txt file used to handle duplicate content, we use an .ashx handler to generate a sitemap based on the sitemaps.org XML format. Once the sitemap is generated, we can submit it to search engines such as Google. Plenty of articles confirm that submitting a sitemap greatly improves the speed and depth with which a site is indexed. Almost every other SEO technique is hard to measure, prone to failure, or can even bring side effects, but submitting a sitemap is the exception!
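For reference, a sitemap in this format is simply a urlset of url entries; the entry below shows the shape our handler will produce (the URL and date values are illustrative):

<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<urlset xmlns="http://www.google.com/schemas/sitemap/0.84">
  <url>
    <loc>http://www.freeflying.com/article?id=234&amp;catelog=blog</loc>
    <lastmod>2009-06-01</lastmod>
    <changefreq>daily</changefreq>
    <priority>0.8</priority>
  </url>
</urlset>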
LINQ to XML gives us a near-perfect experience for generating it.
<%@ WebHandler Language="C#" Class="Website" %>

using System;
using System.Web;
using System.Xml;
using System.Xml.Linq;
using System.Linq;

public class Website : IHttpHandler {

    public void ProcessRequest(HttpContext context) {
        context.Response.ContentType = "text/xml";

        // The declaration of the file; standalone="yes" means the XML document
        // is self-contained and does not depend on an externally defined DTD.
        XDeclaration declaration = new XDeclaration("1.0", "UTF-8", "yes");
        context.Response.Write(declaration);

        // The namespace of the sitemap XML file
        XNamespace ns = "http://www.google.com/schemas/sitemap/0.84";
        XElement siteMap = new XElement(ns + "urlset");

        string fixedUrl = "http://www.freeflying.com/article";
        string wholeUrl = string.Empty;

        // Loop through the data and convert each item into an XML node
        foreach (var item in Articles.GetArticles()) {
            XElement url = new XElement(ns + "url");

            wholeUrl = string.Format("{0}?id={1}&catelog={2}", fixedUrl, item.ID, item.Catelog);
            XElement loc = new XElement(ns + "loc", wholeUrl);
            // lastmod uses the W3C date format (yyyy-MM-dd)
            XElement lastmod = new XElement(ns + "lastmod", item.Lastmod.ToString("yyyy-MM-dd"));
            // changefreq values are lowercase tokens such as "daily"
            XElement changefreq = new XElement(ns + "changefreq", item.Frequency.ToString().ToLower());
            XElement priority = new XElement(ns + "priority", item.Weight);

            url.Add(loc, lastmod, changefreq, priority);
            siteMap.Add(url);
        }

        // Finally, write the whole XML file to the response
        context.Response.Write(siteMap);
    }

    public bool IsReusable {
        get { return false; }
    }
}
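Once the handler is in place, besides submitting the sitemap manually to Google, you can advertise its location with a Sitemap line in robots.txt; the file name below assumes the handler was saved as website.ashx, so adjust it to whatever you actually named it:

Sitemap: http://www.freeflying.com/website.ashx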
RSS also uses XML technology, so we can generate a feed in the same way:
<%@ WebHandler Language="C#" Class="Rss" %>

using System;
using System.Web;
using System.Xml;
using System.Xml.Linq;

public class Rss : IHttpHandler {

    public void ProcessRequest(HttpContext context) {
        context.Response.ContentType = "text/xml";
        context.Response.Write("<?xml version=\"1.0\" encoding=\"utf-8\"?>");

        XElement rssFeed = new XElement("rss", new XAttribute("version", "2.0"));

        string fixedUrl = "http://www.freeflying.com/article";
        string wholeUrl = string.Empty;

        // The channel element describes the feed as a whole
        XElement channel = new XElement("channel",
            new XElement("title", "Freeflying"),
            new XElement("link", fixedUrl),
            new XElement("description", "The website for Dream Flying freely"),
            new XElement("pubDate", DateTime.Now.ToString()));

        // Each article becomes one item in the feed
        foreach (var article in Articles.GetArticles()) {
            XElement item = new XElement("item");

            XElement title = new XElement("title", article.Title);

            wholeUrl = string.Format("{0}?id={1}&catelog={2}", fixedUrl, article.ID, article.Catelog);
            XElement link = new XElement("link", wholeUrl);

            XElement description = new XElement("description", article.Description);
            XElement pubDate = new XElement("pubDate", article.Lastmod.ToString());

            item.Add(title, link, description, pubDate);
            channel.Add(item);
        }

        rssFeed.Add(channel);
        context.Response.Write(rssFeed);
    }

    public bool IsReusable {
        get { return false; }
    }
}
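To let browsers and feed readers discover the feed automatically, you can also advertise it in the head of your pages; the rss.ashx path below is an assumption, so adjust it to match your handler's file name:

<link rel="alternate" type="application/rss+xml" title="Freeflying RSS" href="http://www.freeflying.com/rss.ashx" />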
Both handlers draw on the same mock data:
using System;
using System.Collections.Generic;

/// <summary>
/// Summary description for Articles
/// </summary>
public class Articles {

    public Articles() {
        //
        // TODO: Add constructor logic here
        //
    }

    // Returns a hard-coded list of articles standing in for a real data source
    public static List<Article> GetArticles() {
        return new List<Article>() {
            new Article(234, "blog", DateTime.Now.AddDays(-23), Freq.None, 0.8, "asp.net seo", "articles about SEO in asp.net"),
            new Article(267, "blog", DateTime.Now.AddDays(-245), Freq.Daily, 0.6, "Ado.net Pro", "about the DataSet usage"),
            new Article(653, "news", DateTime.Now.AddDays(-45), Freq.Daily, 1, "CLR via C#", "notebook about this book")
        };
    }
}

public class Article {
    public int ID;
    public string Catelog;
    public DateTime Lastmod;
    public double Weight;
    public Freq Frequency;
    public string Title;
    public string Description;

    public Article(int id, string catelog, DateTime lastmod, Freq frequency, double weight, string title, string description) {
        ID = id;
        Catelog = catelog;
        Lastmod = lastmod;
        Weight = weight;
        Frequency = frequency;
        Title = title;
        Description = description;
    }
}

public enum Freq {
    None = 1,
    Daily = 2,
    Weekly = 3,
}
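In a real site, GetArticles() would of course query a database instead of returning hard-coded objects. A minimal sketch, assuming a SQL Server table whose columns match the Article fields (the connection string name and table/column names are illustrative):

public static List<Article> GetArticles() {
    var articles = new List<Article>();
    // "SiteDb" is a placeholder; use your site's own connection string name
    string connStr = System.Configuration.ConfigurationManager
        .ConnectionStrings["SiteDb"].ConnectionString;
    using (var conn = new System.Data.SqlClient.SqlConnection(connStr))
    using (var cmd = new System.Data.SqlClient.SqlCommand(
        "SELECT ID, Catelog, Lastmod, Frequency, Weight, Title, Description FROM Articles", conn)) {
        conn.Open();
        using (var reader = cmd.ExecuteReader()) {
            while (reader.Read()) {
                articles.Add(new Article(
                    reader.GetInt32(0),
                    reader.GetString(1),
                    reader.GetDateTime(2),
                    (Freq)reader.GetInt32(3),
                    reader.GetDouble(4),
                    reader.GetString(5),
                    reader.GetString(6)));
            }
        }
    }
    return articles;
}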
Author: freeflying (original link)