This article compiles typical usage examples of the IndexWriter class in C#. If you have been wondering what the C# IndexWriter class is for, how to use it, or what real-world IndexWriter code looks like, the curated examples below should help.
In these examples the IndexWriter class comes from Lucene.NET (namespace Lucene.Net.Index). A total of 20 code examples of the IndexWriter class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
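The examples below are mostly test and infrastructure code from Lucene.NET itself, so for orientation, here is a minimal sketch of the everyday IndexWriter workflow: open a Directory, configure the writer, add a document, commit, and dispose. It is not taken from any of the cited projects; the index path, field names, and the LuceneVersion.LUCENE_48 constant are assumptions based on a current Lucene.NET 4.8 build (several older ports quoted below use Version or TEST_VERSION_CURRENT instead).

using System.IO;
using Lucene.Net.Analysis.Standard;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class MinimalIndexWriterDemo
{
    static void Main()
    {
        // Open (or create) an on-disk index; "example-index" is an arbitrary path chosen for this sketch.
        using (var dir = FSDirectory.Open(new DirectoryInfo("example-index")))
        using (var analyzer = new StandardAnalyzer(LuceneVersion.LUCENE_48))
        using (var writer = new IndexWriter(dir, new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)))
        {
            var doc = new Document();
            doc.Add(new StringField("id", "1", Field.Store.YES));                     // exact-match key, not analyzed
            doc.Add(new TextField("content", "hello index writer", Field.Store.YES)); // analyzed full-text field
            writer.AddDocument(doc);
            writer.Commit(); // flush buffered documents and make them visible to newly opened readers
        }
    }
}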
Example 1: CreateRandomTerms
public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
{
    int[] freq = new int[nTerms];
    Terms = new Term[nTerms];
    for (int i = 0; i < nTerms; i++)
    {
        int f = (nTerms + 1) - i; // make first terms less frequent
        freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
        Terms[i] = new Term("f", char.ToString((char)('A' + i)));
    }

    IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
    for (int i = 0; i < nDocs; i++)
    {
        Document d = new Document();
        for (int j = 0; j < nTerms; j++)
        {
            if (Random().Next(freq[j]) == 0)
            {
                d.Add(NewStringField("f", Terms[j].Text(), Field.Store.NO));
                //System.out.println(d);
            }
        }
        iw.AddDocument(d);
    }
    iw.ForceMerge(1);
    iw.Dispose();
}
Developer: paulirwin, Project: lucene.net, Lines of code: 28, Source: TestScorerPerf.cs
Example 2: TestAddSameDocTwice
public virtual void TestAddSameDocTwice()
{
    // LUCENE-5367: this was a problem with the previous code, making sure it
    // works with the new code.
    Directory indexDir = NewDirectory(), taxoDir = NewDirectory();
    IndexWriter indexWriter = new IndexWriter(indexDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
    FacetsConfig facetsConfig = new FacetsConfig();
    Document doc = new Document();
    doc.Add(new FacetField("a", "b"));
    doc = facetsConfig.Build(taxoWriter, doc);
    // these two addDocument() used to fail
    indexWriter.AddDocument(doc);
    indexWriter.AddDocument(doc);
    IOUtils.Close(indexWriter, taxoWriter);

    DirectoryReader indexReader = DirectoryReader.Open(indexDir);
    DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
    IndexSearcher searcher = NewSearcher(indexReader);
    FacetsCollector fc = new FacetsCollector();
    searcher.Search(new MatchAllDocsQuery(), fc);

    Facets facets = GetTaxonomyFacetCounts(taxoReader, facetsConfig, fc);
    FacetResult res = facets.GetTopChildren(10, "a");
    Assert.AreEqual(1, res.LabelValues.Length);
    Assert.AreEqual(2, res.LabelValues[0].Value);

    IOUtils.Close(indexReader, taxoReader);
    IOUtils.Close(indexDir, taxoDir);
}
Developer: ChristopherHaws, Project: lucenenet, Lines of code: 30, Source: TestFacetsConfig.cs
Example 3: GenerateHighlights
/// <summary>
/// Annotates the given sequence of <see cref="Document"/> objects by adding a <b>_highlight</b> field;
/// the <b>_highlight</b> field will contain the best matching text fragment from the <see cref="Document"/>
/// object's full-text field.
/// </summary>
/// <param name="hits">The sequence of <see cref="Document"/> objects.</param>
/// <param name="criteria">The search criteria that produced the hits.</param>
/// <returns>
/// The original sequence of Document objects, with a <b>_highlight</b> field added to each Document.
/// </returns>
public static IEnumerable<Document> GenerateHighlights(this IEnumerable<Document> hits, SearchCriteria criteria)
{
    if (hits == null)
        throw new ArgumentNullException(nameof(hits));
    if (criteria == null)
        throw new ArgumentNullException(nameof(criteria));
    if (String.IsNullOrWhiteSpace(criteria.Query))
        throw new ArgumentException("SearchCriteria.Query cannot be empty");

    var documents = hits.ToList();
    try
    {
        var indexDirectory = new RAMDirectory();
        var analyzer = new FullTextAnalyzer();
        var config = new IndexWriterConfig(analyzer).SetRAMBufferSizeMB(_ramBufferSizeMB);
        var writer = new IndexWriter(indexDirectory, config);

        BuidIndex(documents, writer);
        GenerateHighlights(documents, writer, criteria);

        writer.DeleteAll();
        writer.Commit();
        writer.Close();
        indexDirectory.Close();
    }
    catch (Exception ex)
    {
        _log.Error(ex);
    }
    return documents;
}
Developer: cris-almodovar, Project: expando-db, Lines of code: 42, Source: LuceneHighlighter.cs
Example 4: Main
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public static void main(String[] args) throws java.io.IOException
public static void Main(string[] args)
{
    if (args.Length < 3)
    {
        Console.Error.WriteLine("Usage: IndexMergeTool <mergedIndex> <index1> <index2> [index3] ...");
        Environment.Exit(1);
    }
    FSDirectory mergedIndex = FSDirectory.Open(new DirectoryInfo(args[0]));
    IndexWriter writer = new IndexWriter(mergedIndex, new IndexWriterConfig(Version.LUCENE_CURRENT, null)
        .SetOpenMode(OpenMode.CREATE));

    Directory[] indexes = new Directory[args.Length - 1];
    for (int i = 1; i < args.Length; i++)
    {
        indexes[i - 1] = FSDirectory.Open(new DirectoryInfo(args[i]));
    }

    Console.WriteLine("Merging...");
    writer.AddIndexes(indexes);

    Console.WriteLine("Full merge...");
    writer.ForceMerge(1);
    writer.Dispose();
    Console.WriteLine("Done.");
}
Developer: Cefa68000, Project: lucenenet, Lines of code: 28, Source: IndexMergeTool.cs
Example 5: TestCustomLockFactory
public virtual void TestCustomLockFactory()
{
    Directory dir = new MockDirectoryWrapper(Random(), new RAMDirectory());
    MockLockFactory lf = new MockLockFactory(this);
    dir.LockFactory = lf;

    // Lock prefix should have been set:
    Assert.IsTrue(lf.LockPrefixSet, "lock prefix was not set by the RAMDirectory");

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

    // add 100 documents (so that commit lock is used)
    for (int i = 0; i < 100; i++)
    {
        AddDoc(writer);
    }

    // Both write lock and commit lock should have been created:
    Assert.AreEqual(1, lf.LocksCreated.Count, "# of unique locks created (after instantiating IndexWriter)");
    Assert.IsTrue(lf.MakeLockCount >= 1, "# calls to makeLock is 0 (after instantiating IndexWriter)");

    foreach (String lockName in lf.LocksCreated.Keys)
    {
        MockLockFactory.MockLock @lock = (MockLockFactory.MockLock)lf.LocksCreated[lockName];
        Assert.IsTrue(@lock.LockAttempts > 0, "# calls to Lock.obtain is 0 (after instantiating IndexWriter)");
    }

    writer.Dispose();
}
Developer: joyanta, Project: lucene.net, Lines of code: 29, Source: TestLockFactory.cs
Example 6: TestMmapIndex
public virtual void TestMmapIndex()
{
    // sometimes the directory is not cleaned by rmDir, because on Windows it
    // may take some time until the files are finally dereferenced. So clean the
    // directory up front, or otherwise new IndexWriter will fail.
    DirectoryInfo dirPath = CreateTempDir("testLuceneMmap");
    RmDir(dirPath);
    MMapDirectory dir = new MMapDirectory(dirPath, null);

    // plan to add a set of useful stopwords, consider changing some of the
    // interior filters.
    MockAnalyzer analyzer = new MockAnalyzer(Random());
    // TODO: something about lock timeouts and leftover locks.
    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(IndexWriterConfig.OpenMode_e.CREATE));
    writer.Commit();
    IndexReader reader = DirectoryReader.Open(dir);
    IndexSearcher searcher = NewSearcher(reader);

    int num = AtLeast(1000);
    for (int dx = 0; dx < num; dx++)
    {
        string f = RandomField();
        Document doc = new Document();
        doc.Add(NewTextField("data", f, Field.Store.YES));
        writer.AddDocument(doc);
    }

    reader.Dispose();
    writer.Dispose();
    RmDir(dirPath);
}
Developer: joyanta, Project: lucene.net, Lines of code: 31, Source: TestWindowsMMap.cs
Example 7: LogDocMergePolicy
public LogDocMergePolicy(IndexWriter writer)
    : base(writer)
{
    minMergeSize = DEFAULT_MIN_MERGE_DOCS;

    // maxMergeSize is never used by LogDocMergePolicy; set
    // it to Long.MAX_VALUE to disable it
    maxMergeSize = System.Int64.MaxValue;
}
Developer: mindis, Project: Transformalize, Lines of code: 8, Source: LogDocMergePolicy.cs
Example 8: LogByteSizeMergePolicy
public LogByteSizeMergePolicy(IndexWriter writer)
    : base(writer)
{
    minMergeSize = (long) (DEFAULT_MIN_MERGE_MB * 1024 * 1024);
    //mgarski - the line below causes an overflow in .NET, resulting in a negative number...
    //maxMergeSize = (long) (DEFAULT_MAX_MERGE_MB * 1024 * 1024);
    maxMergeSize = DEFAULT_MAX_MERGE_MB;
}
Developer: mindis, Project: Transformalize, Lines of code: 8, Source: LogByteSizeMergePolicy.cs
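The mgarski comment in the constructor above points at C#'s default unchecked Int32 arithmetic: when the megabyte constant is an integral value, multiplying it by 1024 * 1024 is carried out entirely in 32-bit arithmetic and wraps to a negative number before the result is cast to long. A small standalone sketch of that failure mode (the value 2048 is assumed here purely for illustration; it is not taken from the project above):

using System;

class MergeSizeOverflowDemo
{
    static void Main()
    {
        // Hypothetical integral constant standing in for a maximum merge size in megabytes.
        int maxMergeMB = 2048;

        // All three operands are Int32, so the product 2048 * 1024 * 1024 (= 2^31) wraps to a
        // negative value in unchecked arithmetic; the cast to long happens too late to help.
        long wrong = (long)(maxMergeMB * 1024 * 1024);

        // Widening to long (or using a floating-point constant, as the Java original does)
        // before multiplying keeps the whole computation in 64-bit arithmetic.
        long right = (long)maxMergeMB * 1024 * 1024;

        Console.WriteLine(wrong); // -2147483648
        Console.WriteLine(right); // 2147483648
    }
}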
Example 9: GetMergeThread
protected override MergeThread GetMergeThread(IndexWriter writer, MergePolicy.OneMerge merge)
{
    MergeThread thread = new MyMergeThread(this, writer, merge);
    thread.ThreadPriority = MergeThreadPriority;
    thread.SetDaemon(true);
    thread.Name = "MyMergeThread";
    return thread;
}
Developer: joyanta, Project: lucene.net, Lines of code: 8, Source: TestMergeSchedulerExternal.cs
Example 10: AddDocs2
private void AddDocs2(IndexWriter writer, int numDocs)
{
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("content", "bbb", Field.Store.NO));
        writer.AddDocument(doc);
    }
}
Developer: joyanta, Project: lucene.net, Lines of code: 9, Source: TestPerFieldPostingsFormat2.cs
Example 11: IndexerThread
public IndexerThread(IndexWriter w, FacetsConfig config, TaxonomyWriter tw, ReferenceManager<SearcherAndTaxonomy> mgr, int ordLimit, AtomicBoolean stop)
{
    this.w = w;
    this.config = config;
    this.tw = tw;
    this.mgr = mgr;
    this.ordLimit = ordLimit;
    this.stop = stop;
}
Developer: Cefa68000, Project: lucenenet, Lines of code: 9, Source: TestSearcherTaxonomyManager.cs
Example 12: AddDocs3
private void AddDocs3(IndexWriter writer, int numDocs)
{
    for (int i = 0; i < numDocs; i++)
    {
        Document doc = new Document();
        doc.Add(NewTextField("content", "ccc", Field.Store.NO));
        doc.Add(NewStringField("id", "" + i, Field.Store.YES));
        writer.AddDocument(doc);
    }
}
Developer: joyanta, Project: lucene.net, Lines of code: 10, Source: TestPerFieldPostingsFormat2.cs
Example 13: NewWriter
private IndexWriter NewWriter(Directory dir, IndexWriterConfig conf)
{
    LogDocMergePolicy logByteSizeMergePolicy = new LogDocMergePolicy();
    logByteSizeMergePolicy.NoCFSRatio = 0.0; // make sure we use plain files
    conf.SetMergePolicy(logByteSizeMergePolicy);
    IndexWriter writer = new IndexWriter(dir, conf);
    return writer;
}
Developer: joyanta, Project: lucene.net, Lines of code: 10, Source: TestPerFieldPostingsFormat2.cs
Example 14: TearDown
public override void TearDown()
{
    Iw.Dispose();
    TestUtil.CheckIndex(Dir); // for some extra coverage, checkIndex before we forceMerge
    Iwc.SetOpenMode(IndexWriterConfig.OpenMode_e.APPEND);
    IndexWriter iw = new IndexWriter(Dir, (IndexWriterConfig)Iwc.Clone());
    iw.ForceMerge(1);
    iw.Dispose();
    Dir.Dispose(); // just force a checkindex for now
    base.TearDown();
}
Developer: paulirwin, Project: lucene.net, Lines of code: 11, Source: TestBlockPostingsFormat2.cs
Example 15: CreateDummySearcher
// TODO: this should be setUp()....
public virtual void CreateDummySearcher()
{
    // Create a dummy index with nothing in it.
    // this could possibly fail if Lucene starts checking for docid ranges...
    d = NewDirectory();
    IndexWriter iw = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));
    iw.AddDocument(new Document());
    iw.Dispose();
    r = DirectoryReader.Open(d);
    s = NewSearcher(r);
}
Developer: paulirwin, Project: lucene.net, Lines of code: 12, Source: TestScorerPerf.cs
Example 16: BeforeClassSorterUtilTest
public void BeforeClassSorterUtilTest()
{
    // only read the values of the undeleted documents, since after addIndexes,
    // the deleted ones will be dropped from the index.
    Bits liveDocs = reader.LiveDocs;
    List<int> values = new List<int>();
    for (int i = 0; i < reader.MaxDoc; i++)
    {
        if (liveDocs == null || liveDocs.Get(i))
        {
            values.Add(int.Parse(reader.Document(i).Get(ID_FIELD), CultureInfo.InvariantCulture));
        }
    }

    int idx = Random().nextInt(SORT.Length);
    Sort sorter = SORT[idx];
    if (idx == 1)
    { // reverse doc sort
        values.Reverse();
    }
    else
    {
        values.Sort();
        if (Random().nextBoolean())
        {
            sorter = new Sort(new SortField(NUMERIC_DV_FIELD, SortField.Type_e.LONG, true)); // descending
            values.Reverse();
        }
    }
    sortedValues = values.ToArray();

    if (VERBOSE)
    {
        Console.WriteLine("sortedValues: " + sortedValues);
        Console.WriteLine("Sorter: " + sorter);
    }

    Directory target = NewDirectory();
    using (IndexWriter writer = new IndexWriter(target, NewIndexWriterConfig(TEST_VERSION_CURRENT, null)))
    {
        using (reader = SortingAtomicReader.Wrap(reader, sorter))
        {
            writer.AddIndexes(reader);
        }
    }
    dir.Dispose();

    // CheckIndex the target directory
    dir = target;
    TestUtil.CheckIndex(dir);

    // set reader for tests
    reader = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir));
    assertFalse("index should not have deletions", reader.HasDeletions);
}
Developer: ChristopherHaws, Project: lucenenet, Lines of code: 53, Source: IndexSortingTest.cs
Example 17: Merge
/// <summary>Just do the merges in sequence. We do this
/// "synchronized" so that even if the application is using
/// multiple threads, only one merge may run at a time.
/// </summary>
public override void Merge(IndexWriter writer)
{
    lock (this)
    {
        while (true)
        {
            MergePolicy.OneMerge merge = writer.GetNextMerge();
            if (merge == null)
                break;
            writer.Merge(merge);
        }
    }
}
Developer: JokerMisfits, Project: linux-packaging-mono, Lines of code: 17, Source: SerialMergeScheduler.cs
Example 18: Merge
/// <summary>
/// Just do the merges in sequence. We do this
/// "synchronized" so that even if the application is using
/// multiple threads, only one merge may run at a time.
/// </summary>
public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound)
{
    lock (this)
    {
        while (true)
        {
            MergePolicy.OneMerge merge = writer.NextMerge;
            if (merge == null)
            {
                break;
            }
            writer.Merge(merge);
        }
    }
}
Developer: Cefa68000, Project: lucenenet, Lines of code: 20, Source: SerialMergeScheduler.cs
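Examples 17 and 18 show SerialMergeScheduler internals from two different Lucene.NET versions; from the application side, the scheduler is simply installed on the writer configuration. The sketch below is an illustration under assumptions, not code from the cited projects: it uses the 4.8-style fluent SetMergeScheduler call matching the configuration style seen elsewhere in this article (newer Lucene.NET builds may expose this as a MergeScheduler property instead), and the WhitespaceAnalyzer and RAMDirectory choices are arbitrary.

using Lucene.Net.Analysis.Core;
using Lucene.Net.Documents;
using Lucene.Net.Index;
using Lucene.Net.Store;
using Lucene.Net.Util;

class SerialMergeSchedulerDemo
{
    static void Main()
    {
        var analyzer = new WhitespaceAnalyzer(LuceneVersion.LUCENE_48);
        var config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
            .SetMergeScheduler(new SerialMergeScheduler()); // merges run one at a time, on the thread that triggers them

        using (var writer = new IndexWriter(new RAMDirectory(), config))
        {
            for (int i = 0; i < 1000; i++)
            {
                var doc = new Document();
                doc.Add(new StringField("id", i.ToString(), Field.Store.NO));
                writer.AddDocument(doc); // any merge started here blocks this thread until it completes
            }
            writer.ForceMerge(1); // final full merge, also executed serially
        }
    }
}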
Example 19: TestFailIfIndexWriterNotClosed
public virtual void TestFailIfIndexWriterNotClosed()
{
    MockDirectoryWrapper dir = NewMockDirectory();
    IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, null));
    try
    {
        dir.Dispose();
        Assert.Fail();
    }
    catch (Exception expected)
    {
        Assert.IsTrue(expected.Message.Contains("there are still open locks"));
    }
    iw.Dispose();
    dir.Dispose();
}
Developer: paulirwin, Project: lucene.net, Lines of code: 16, Source: TestMockDirectoryWrapper.cs
Example 20: TestBasic
public virtual void TestBasic()
{
    HashSet<string> fileExtensions = new HashSet<string>();
    fileExtensions.Add(Lucene40StoredFieldsWriter.FIELDS_EXTENSION);
    fileExtensions.Add(Lucene40StoredFieldsWriter.FIELDS_INDEX_EXTENSION);

    MockDirectoryWrapper primaryDir = new MockDirectoryWrapper(Random(), new RAMDirectory());
    primaryDir.CheckIndexOnClose = false; // only part of an index
    MockDirectoryWrapper secondaryDir = new MockDirectoryWrapper(Random(), new RAMDirectory());
    secondaryDir.CheckIndexOnClose = false; // only part of an index

    FileSwitchDirectory fsd = new FileSwitchDirectory(fileExtensions, primaryDir, secondaryDir, true);
    // for now we wire Lucene40Codec because we rely upon its specific impl
    bool oldValue = OLD_FORMAT_IMPERSONATION_IS_ACTIVE;
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = true;
    IndexWriter writer = new IndexWriter(fsd, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random()))).SetMergePolicy(NewLogMergePolicy(false)).SetCodec(Codec.ForName("Lucene40")).SetUseCompoundFile(false));
    TestIndexWriterReader.CreateIndexNoClose(true, "ram", writer);
    IndexReader reader = DirectoryReader.Open(writer, true);
    Assert.AreEqual(100, reader.MaxDoc);
    writer.Commit();

    // we should see only fdx,fdt files here
    string[] files = primaryDir.ListAll();
    Assert.IsTrue(files.Length > 0);
    for (int x = 0; x < files.Length; x++)
    {
        string ext = FileSwitchDirectory.GetExtension(files[x]);
        Assert.IsTrue(fileExtensions.Contains(ext));
    }

    files = secondaryDir.ListAll();
    Assert.IsTrue(files.Length > 0);
    // we should not see fdx,fdt files here
    for (int x = 0; x < files.Length; x++)
    {
        string ext = FileSwitchDirectory.GetExtension(files[x]);
        Assert.IsFalse(fileExtensions.Contains(ext));
    }

    reader.Dispose();
    writer.Dispose();

    files = fsd.ListAll();
    for (int i = 0; i < files.Length; i++)
    {
        Assert.IsNotNull(files[i]);
    }
    fsd.Dispose();
    OLD_FORMAT_IMPERSONATION_IS_ACTIVE = oldValue;
}
Developer: paulirwin, Project: lucene.net, Lines of code: 47, Source: TestFileSwitchDirectory.cs
Note: The IndexWriter class examples in this article were compiled from GitHub, MSDocs, and similar source-code and documentation platforms. The snippets were selected from open-source projects contributed by their authors; copyright remains with the original authors, and redistribution and use are subject to each project's license. Do not republish without permission.