using Lucene.Net.Analysis;
using Lucene.Net.Analysis.PanGu;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Search;
using Lucene.Net.Store;
using Model.HelperModel;
using System;
using System.Collections.Generic;

namespace BLL
{
    /// <summary>
    /// Full-text search business logic built on Lucene.Net with PanGu word segmentation.
    /// </summary>
    public static class SearchBLL
    {
        /// <summary>
        /// Log helper. A single static instance so every call site in this class
        /// logs through the same logger.
        /// </summary>
        private static readonly Common.LogHelper logger = new Common.LogHelper(typeof(SearchBLL));

        /// <summary>
        /// Physical location of the Lucene index, resolved from configuration.
        /// </summary>
        private static readonly string indexPath = Common.ConfigurationHelper.deleettingmappath("IndexPath");

        /// <summary>
        /// Runs a phrase search over the "body" field of the index.
        /// </summary>
        /// <param name="keywords">Keywords entered by the user.</param>
        /// <returns>
        /// The list of matching results, or null when nothing matched or an error
        /// occurred (preserved from the original contract — callers check for null).
        /// </returns>
        public static List<SearchResult> Search(string keywords)
        {
            try
            {
                // Open the index store; NoLockFactory because we only read.
                FSDirectory directory = FSDirectory.Open(new System.IO.DirectoryInfo(indexPath), new NoLockFactory());
                IndexReader reader = IndexReader.Open(directory, true);
                IndexSearcher searcher = new IndexSearcher(reader);
                try
                {
                    // Build a phrase query from the segmented user keywords.
                    PhraseQuery query = new PhraseQuery();
                    foreach (string word in SplitWord(keywords))
                    {
                        query.Add(new Term("body", word));
                    }
                    // Allow up to 100 positions between the phrase terms.
                    query.SetSlop(100);

                    // Collect at most the top 1000 scoring documents.
                    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
                    searcher.Search(query, null, collector);
                    ScoreDoc[] docs = collector.TopDocs(0, collector.GetTotalHits()).scoreDocs;

                    List<SearchResult> listResult = new List<SearchResult>();
                    for (int i = 0; i < docs.Length; i++)
                    {
                        // The collector yields only the Lucene-assigned document id;
                        // fetch the Document to read its stored fields.
                        int docId = docs[i].doc;
                        Document doc = searcher.Doc(docId);

                        SearchResult result = new SearchResult();
                        result.Number = doc.Get("id");
                        result.Title = doc.Get("title");
                        result.BodyPreview = Preview(doc.Get("body"), keywords);
                        result.Url = doc.Get("url");
                        listResult.Add(result);
                    }

                    // BUG FIX: original garbled text read "Count = 0"; the intent
                    // (and the only compilable reading) is an equality test.
                    return listResult.Count == 0 ? null : listResult;
                }
                finally
                {
                    // BUG FIX: reader/searcher/directory were previously leaked on
                    // every call — the originals were never closed.
                    searcher.Close();
                    reader.Close();
                    directory.Close();
                }
            }
            catch (Exception ex)
            {
                // The original caught SystemException and Exception with identical
                // bodies; a single catch is equivalent.
                logger.Error(ex);
                return null;
            }
        }

        /// <summary>
        /// Builds a highlighted preview fragment of the body text.
        /// </summary>
        /// <param name="body">Full body content of the matched document.</param>
        /// <param name="keyword">The user's search keywords.</param>
        /// <returns>The best-matching abstract fragment with hits wrapped in red font tags.</returns>
        private static string Preview(string body, string keyword)
        {
            // HTMLFormatter parameters are the prefix/suffix wrapped around each hit.
            PanGu.HighLight.SimpleHTMLFormatter simpleHTMLFormatter =
                new PanGu.HighLight.SimpleHTMLFormatter("<font color=\"red\">", "</font>");
            // Highlighter takes the formatter plus a PanGu segmenter.
            PanGu.HighLight.Highlighter highlighter =
                new PanGu.HighLight.Highlighter(simpleHTMLFormatter, new PanGu.Segment());
            // Number of characters per abstract fragment.
            highlighter.FragmentSize = 100;
            return highlighter.GetBestFragment(keyword, body);
        }

        /// <summary>
        /// PanGu word segmentation: splits the user's search input into terms.
        /// </summary>
        /// <param name="str">Raw keywords entered by the user.</param>
        /// <returns>Array of segmented terms.</returns>
        private static string[] SplitWord(string str)
        {
            List<string> list = new List<string>();
            Analyzer analyzer = new PanGuAnalyzer();
            TokenStream tokenStream = analyzer.TokenStream("", new System.IO.StringReader(str));
            try
            {
                Lucene.Net.Analysis.Token token = null;
                while ((token = tokenStream.Next()) != null)
                {
                    list.Add(token.TermText());
                }
            }
            finally
            {
                // BUG FIX: the token stream was never closed in the original.
                tokenStream.Close();
            }
            return list.ToArray();
        }
    }
}