// 添加:2013-12-25 (added)
// 更新:2013-12-26 新增分页功能。(update: added paging)
// 更新:2013-12-27 新增按分类查询功能,调整索引行新增记录的图片字段。(update: query-by-category; image field added to index rows)
// 最新盘古分词dll和词典管理工具下载 (latest PanGu segmenter dll & dictionary tool): http://pangusegment.codeplex.com/
// 词典下载 (dictionary download): http://pangusegment.codeplex.com/releases/view/47411
//封装类
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using Lucene.Net.Analysis;
using Lucene.Net.Index;
using Lucene.Net.Documents;
using System.Reflection;
using Lucene.Net.QueryParsers;
using Lucene.Net.Search;
namespace SearchTest
{
/// <summary>
/// 盘古分词在lucene.net中的使用帮助类
/// 调用PanGuLuceneHelper.instance
/// </summary>
public class PanGuLuceneHelper
{
private PanGuLuceneHelper() { }
#region 单一实例
private static PanGuLuceneHelper _instance = null;
/// <summary>
/// 单一实例
/// </summary>
public static PanGuLuceneHelper instance
{
get
{
if (_instance == null) _instance = new PanGuLuceneHelper();
return _instance;
}
}
#endregion
#region 分词测试
/// <summary>
/// 分词测试
/// </summary>
/// <param name="keyword"></param>
/// <returns></returns>
public string Token(string keyword)
{
string ret = "";
System.IO.StringReader reader = new System.IO.StringReader(keyword);
Lucene.Net.Analysis.TokenStream ts = analyzer.TokenStream(keyword, reader);
bool hasNext = ts.IncrementToken();
Lucene.Net.Analysis.Tokenattributes.ITermAttribute ita;
while (hasNext)
{
ita = ts.GetAttribute<Lucene.Net.Analysis.Tokenattributes.ITermAttribute>();
ret += ita.Term + "|";
hasNext = ts.IncrementToken();
}
ts.CloneAttributes();
reader.Close();
analyzer.Close();
return ret;
}
#endregion
#region 创建索引
/// <summary>
/// 创建索引
/// </summary>
/// <param name="datalist"></param>
/// <returns></returns>
public bool CreateIndex(List<MySearchUnit> datalist)
{
IndexWriter writer = null;
try
{
writer = new IndexWriter(directory_luce, analyzer, false, IndexWriter.MaxFieldLength.LIMITED);//false表示追加(true表示删除之前的重新写入)
}
catch
{
writer = new IndexWriter(directory_luce, analyzer, true, IndexWriter.MaxFieldLength.LIMITED);//false表示追加(true表示删除之前的重新写入)
}
foreach (MySearchUnit data in datalist)
{
CreateIndex(writer, data);
}
writer.Optimize();
writer.Dispose();
return true;
}
public bool CreateIndex(IndexWriter writer, MySearchUnit data)
{
try
{
if (data == null) return false;
Document doc = new Document();
Type type = data.GetType();//assembly.GetType("Reflect_test.PurchaseOrderHeadManageModel", true, true); //命名空间名称 + 类名
//创建类的实例
//object obj = Activator.CreateInstance(type, true);
//获取公共属性
PropertyInfo[] Propertys = type.GetProperties();
for (int i = 0; i < Propertys.Length; i++)
{
//Propertys[i].SetValue(Propertys[i], i, null); //设置值
PropertyInfo pi = Propertys[i];
string name=pi.Name;
object objval = pi.GetValue(data, null);
string value = objval == null ? "" : objval.ToString(); //值
if (name == "id" || name=="flag" )//id在写入索引时必是不分词,否则是模糊搜索和删除,会出现混乱
{
doc.Add(new Field(name, value, Field.Store.YES, Field.Index.NOT_ANALYZED));//id不分词
}
else
{
doc.Add(new Field(name, value, Field.Store.YES, Field.Index.ANALYZED));
}
}
writer.AddDocument(doc);
}
catch (System.IO.FileNotFoundException fnfe)
{
throw fnfe;
}
return true;
}
#endregion
#region 在title和content字段中查询数据
/// <summary>
/// 在title和content字段中查询数据
/// </summary>
/// <param name="keyword"></param>
/// <returns></returns>
public List<MySearchUnit> Search(string keyword)
{
string[] fileds = { "title", "content" };//查询字段
//Stopwatch st = new Stopwatch();
//st.Start();
QueryParser parser = null;// new QueryParser(Lucene.Net.Util.Version.LUCENE_30, field, analyzer);//一个字段查询
parser = new MultiFieldQueryParser(version, fileds, analyzer);//多个字段查询
Query query = parser.Parse(keyword);
int n = 1000;
IndexSearcher searcher = new IndexSearcher(directory_luce, true);//true-表示只读
TopDocs docs = searcher.Search(query, (Filter)null, n);
if (docs == null || docs.TotalHits == 0)
{
return null;
}
else
{
List<MySearchUnit> list = new List<MySearchUnit>();
&nb