本文整理汇总了C#中Lucene.Net.Store.RAMDirectory.Close方法的典型用法代码示例。如果您正苦于以下问题：C# RAMDirectory.Close方法的具体用法？C# RAMDirectory.Close怎么用？C# RAMDirectory.Close使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类Lucene.Net.Store.RAMDirectory的用法示例。
在下文中一共展示了RAMDirectory.Close方法的12个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: TestDetectClose
/// <summary>
/// Verifies that creating an output on an already-closed Directory throws
/// AlreadyClosedException, for both the RAM-backed and FS-backed implementations.
/// </summary>
public virtual void TestDetectClose()
{
    Directory ramDir = new RAMDirectory();
    ramDir.Close();
    Assert.Throws<AlreadyClosedException>(() => ramDir.CreateOutput("test"), "did not hit expected exception");

    string tempPath = AppSettings.Get("tempDir", System.IO.Path.GetTempPath());
    Directory fsDir = FSDirectory.Open(new System.IO.DirectoryInfo(tempPath));
    fsDir.Close();
    Assert.Throws<AlreadyClosedException>(() => fsDir.CreateOutput("test"), "did not hit expected exception");
}
示例2: testMissingTerms
/// <summary>
/// Exercises TermsFilter against an index of 100 documents whose field values
/// are multiples of 10 ("0", "10", ..., "990"): a term that was never indexed
/// must match nothing, while indexed terms accumulate one hit each.
/// </summary>
public void testMissingTerms()
{
    const string fieldName = "field1";
    Directory dir = new RAMDirectory();

    var writer = new IndexWriter(dir, new KeywordAnalyzer(), IndexWriter.MaxFieldLength.UNLIMITED);
    for (int docNum = 0; docNum < 100; docNum++)
    {
        var document = new Document();
        // Terms are units of 10, so e.g. "19" is never present in the index.
        document.Add(new Field(fieldName, "" + (docNum * 10), Field.Store.YES, Field.Index.ANALYZED));
        writer.AddDocument(document);
    }
    IndexReader reader = writer.GetReader();
    writer.Close();

    var filter = new TermsFilter();

    filter.AddTerm(new Term(fieldName, "19"));
    var bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(0, bits.Cardinality(), "Must match nothing");

    filter.AddTerm(new Term(fieldName, "20"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(1, bits.Cardinality(), "Must match 1");

    filter.AddTerm(new Term(fieldName, "10"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

    // "00" is not an indexed value either ("0" is), so the count stays at 2.
    filter.AddTerm(new Term(fieldName, "00"));
    bits = (FixedBitSet)filter.GetDocIdSet(reader);
    Assert.AreEqual(2, bits.Cardinality(), "Must match 2");

    reader.Close();
    dir.Close();
}
示例3: HelloWorldTest
/// <summary>
/// Indexes a single document and verifies that a parsed query can retrieve it.
/// </summary>
public void HelloWorldTest()
{
    Directory directory = new RAMDirectory();
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_29);
    IndexWriter writer = new IndexWriter(directory,
                                         analyzer,
                                         IndexWriter.MaxFieldLength.UNLIMITED);
    Document doc = new Document();
    doc.Add(new Field("id", "1", Field.Store.YES, Field.Index.NO));
    doc.Add(new Field("postBody", "sample test", Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(doc);
    writer.Optimize();
    writer.Commit();
    writer.Close();

    QueryParser parser = new QueryParser(Version.LUCENE_29, "postBody", analyzer);
    Query query = parser.Parse("sample test");

    //Setup searcher
    IndexSearcher searcher = new IndexSearcher(directory, true);

    //Do the search
    var hits = searcher.Search(query, null, 10);

    // BUG FIX: iterate over ScoreDocs.Length, not TotalHits. TotalHits counts
    // ALL matching documents in the index, while only the top 10 ScoreDocs were
    // requested above; with more than 10 matches the original loop would throw
    // an IndexOutOfRangeException.
    for (int i = 0; i < hits.ScoreDocs.Length; i++)
    {
        var doc1 = hits.ScoreDocs[i];
    }

    searcher.Close();
    directory.Close();
}
示例4: TestDemo_Renamed
/// <summary>
/// End-to-end demo: index one document in memory, search for one of its terms,
/// and verify the stored field round-trips unchanged.
/// </summary>
public virtual void TestDemo_Renamed()
{
    Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_CURRENT);

    // Store the index in memory:
    Directory directory = new RAMDirectory();
    // To store an index on disk, use this instead:
    //Directory directory = FSDirectory.open("/tmp/testindex");
    IndexWriter iwriter = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(25000));
    Document doc = new Document();
    System.String text = "This is the text to be indexed.";
    doc.Add(new Field("fieldname", text, Field.Store.YES, Field.Index.ANALYZED));
    iwriter.AddDocument(doc);
    iwriter.Close();

    // Now search the index:
    IndexSearcher isearcher = new IndexSearcher(directory, true); // read-only=true
    // Parse a simple query that searches for "text":
    QueryParser parser = new QueryParser(Util.Version.LUCENE_CURRENT, "fieldname", analyzer);
    Query query = parser.Parse("text");
    ScoreDoc[] hits = isearcher.Search(query, null, 1000).ScoreDocs;
    Assert.AreEqual(1, hits.Length);

    // Iterate through the results:
    for (int i = 0; i < hits.Length; i++)
    {
        Document hitDoc = isearcher.Doc(hits[i].Doc);
        // BUG FIX: NUnit's Assert.AreEqual signature is (expected, actual);
        // the original call had the arguments reversed, which produces a
        // misleading message on failure.
        Assert.AreEqual("This is the text to be indexed.", hitDoc.Get("fieldname"));
    }
    isearcher.Close();
    directory.Close();
}
示例5: TestDetectClose
/// <summary>
/// Verifies that creating an output on a closed Directory throws
/// AlreadyClosedException, for both RAMDirectory and FSDirectory.
/// </summary>
public virtual void TestDetectClose()
{
    Directory dir = new RAMDirectory();
    dir.Close();
    try
    {
        dir.CreateOutput("test");
        Assert.Fail("did not hit expected exception");
    }
    // FIX: drop the unused exception variable ("ace") to silence the
    // CS0168 "variable declared but never used" compiler warning.
    catch (AlreadyClosedException)
    {
        // expected
    }

    dir = FSDirectory.Open(new System.IO.FileInfo(Support.AppSettings.Get("tempDir", System.IO.Path.GetTempPath())));
    dir.Close();
    try
    {
        dir.CreateOutput("test");
        Assert.Fail("did not hit expected exception");
    }
    catch (AlreadyClosedException)
    {
        // expected
    }
}
示例6: Main
/// <summary>
/// Indexes a handful of named geographic points, then runs a spatial
/// "is within circle" query centered on Oslo Spektrum and prints the names
/// of matching documents.
/// NOTE(review): relies on fields declared elsewhere in this type
/// (strategy, context, fieldInfo) and on the AddPoint helper.
/// </summary>
static void Main(string[] args)
{
    int maxLevels = GeohashPrefixTree.GetMaxLevelsPossible();
    strategy = new RecursivePrefixTreeStrategy(
        new GeohashPrefixTree(context, maxLevels));

    var directory = new RAMDirectory();
    var indexWriter = new IndexWriter(directory, new SimpleAnalyzer(), true,
        IndexWriter.MaxFieldLength.UNLIMITED);
    AddPoint(indexWriter, "London", -81.233040, 42.983390);
    AddPoint(indexWriter, "East New York", -73.882360, 40.666770);
    AddPoint(indexWriter, "Manhattan", -73.966250, 40.783430);
    AddPoint(indexWriter, "New York City", -74.005970, 40.714270);
    AddPoint(indexWriter, "Oslo", 10.746090, 59.912730);
    AddPoint(indexWriter, "Bergen", 5.324150, 60.392990);
    AddPoint(indexWriter, "Washington, D. C.", -77.036370, 38.895110);
    indexWriter.Close();

    // Origin point - Oslo Spektrum
    const double originLat = 59.9138688;
    const double originLng = 10.752245399999993;
    const double searchRadius = 600;
    var spatialQuery = strategy.MakeQuery(
        new SpatialArgs(SpatialOperation.IsWithin,
                        context.MakeCircle(originLng, originLat, searchRadius)),
        fieldInfo);

    var indexSearcher = new IndexSearcher(directory);
    var topDocs = indexSearcher.Search(spatialQuery, null, 100);
    foreach (var scoreDoc in topDocs.ScoreDocs)
    {
        Console.WriteLine(indexSearcher.Doc(scoreDoc.doc).Get("Name"));
    }
    indexSearcher.Close();
    directory.Close();
}
示例7: AssertFoundInText
/// <summary>
/// Indexes <paramref name="whatToIndex"/> into the "content" field of a fresh
/// in-memory index and asserts that the query <paramref name="whatToSearch"/>
/// matches exactly one document.
/// </summary>
protected void AssertFoundInText(string whatToIndex, string whatToSearch)
{
    Directory directory = new RAMDirectory();

    IndexWriter writer = new IndexWriter(directory, analyzer, true, new IndexWriter.MaxFieldLength(10000));
    var document = new Document();
    document.Add(new Field("content", whatToIndex, Field.Store.YES, Field.Index.ANALYZED));
    writer.AddDocument(document);
    writer.Close();
    writer = null;

    IndexSearcher searcher = new IndexSearcher(directory, true); // read-only=true
    var queryParser = new QueryParser(Lucene.Net.Util.Version.LUCENE_29, "content", analyzer);
    Query parsedQuery = queryParser.Parse(whatToSearch);
    ScoreDoc[] matches = searcher.Search(parsedQuery, null, 1000).scoreDocs;
    Assert(matches.Length == 1);
    searcher.Close();
    directory.Close();
}
示例8: TestIllegalEOF
/// <summary>
/// Writes exactly 1024 bytes to a RAMDirectory file and verifies that seeking
/// to position 1024 — the exact end of the file — does not raise an EOF error.
/// </summary>
public virtual void TestIllegalEOF()
{
    var directory = new RAMDirectory();

    // Produce a file containing exactly 1024 bytes.
    IndexOutput output = directory.CreateOutput("out");
    var buffer = new byte[1024];
    output.WriteBytes(buffer, 0, 1024);
    output.Close();

    // Seeking to the file length itself must be legal (one past the last byte).
    IndexInput input = directory.OpenInput("out");
    input.Seek(1024);
    input.Close();

    directory.Close();
}
示例9: TestUndeleteAllAfterClose
/// <summary>
/// Deletes every document through one reader, closes it, then checks that a
/// fresh reader can restore them all via UndeleteAll().
/// </summary>
public virtual void TestUndeleteAllAfterClose()
{
    Directory directory = new RAMDirectory();

    var writer = new IndexWriter(directory, new WhitespaceAnalyzer(), true);
    AddDocumentWithFields(writer);
    AddDocumentWithFields(writer);
    writer.Close();

    // Mark both documents deleted, then close so the deletes are committed.
    IndexReader reader = IndexReader.Open(directory);
    reader.DeleteDocument(0);
    reader.DeleteDocument(1);
    reader.Close();

    // A new reader can still undo the deletions.
    reader = IndexReader.Open(directory);
    reader.UndeleteAll();
    Assert.AreEqual(2, reader.NumDocs()); // nothing has really been deleted thanks to undeleteAll()
    reader.Close();
    directory.Close();
}
示例10: Test_Store_RAMDirectory
/// <summary>
/// Regression test for LUCENENET-174: a RAMDirectory must survive binary
/// serialization round-trips — after deserializing, the index remains usable
/// and new documents can be appended and found.
/// </summary>
public void Test_Store_RAMDirectory()
{
    Lucene.Net.Store.RAMDirectory ramDIR = new Lucene.Net.Store.RAMDirectory();

    //Index 1 Doc
    Lucene.Net.Index.IndexWriter wr = new Lucene.Net.Index.IndexWriter(ramDIR, new Lucene.Net.Analysis.WhitespaceAnalyzer(), true);
    Lucene.Net.Documents.Document doc = new Lucene.Net.Documents.Document();
    doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
    wr.AddDocument(doc);
    wr.Close();

    // now serialize it
    // SECURITY NOTE(review): BinaryFormatter is insecure and removed in .NET 9+;
    // it is kept here only because serializability of RAMDirectory via
    // BinaryFormatter is exactly what this legacy test verifies.
    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter serializer = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    System.IO.MemoryStream memoryStream = new System.IO.MemoryStream();
    serializer.Serialize(memoryStream, ramDIR);

    //Close DIR
    ramDIR.Close();
    ramDIR = null;

    //now deserialize
    memoryStream.Seek(0, System.IO.SeekOrigin.Begin);
    Lucene.Net.Store.RAMDirectory ramDIR2 = (Lucene.Net.Store.RAMDirectory)serializer.Deserialize(memoryStream);

    //Add 1 more doc
    wr = new Lucene.Net.Index.IndexWriter(ramDIR2, new Lucene.Net.Analysis.WhitespaceAnalyzer(), false);
    doc = new Lucene.Net.Documents.Document();
    doc.Add(new Lucene.Net.Documents.Field("field1", "value1 value11", Lucene.Net.Documents.Field.Store.YES, Lucene.Net.Documents.Field.Index.TOKENIZED));
    wr.AddDocument(doc);
    wr.Close();

    //Search
    Lucene.Net.Search.IndexSearcher s = new Lucene.Net.Search.IndexSearcher(ramDIR2);
    Lucene.Net.QueryParsers.QueryParser qp = new Lucene.Net.QueryParsers.QueryParser("field1", new Lucene.Net.Analysis.Standard.StandardAnalyzer());
    Lucene.Net.Search.Query q = qp.Parse("value1");
    Lucene.Net.Search.TopDocs topDocs = s.Search(q, 100);
    s.Close();

    // BUG FIX: Assert.AreEqual takes (expected, actual); the original call
    // had the arguments reversed.
    Assert.AreEqual(2, topDocs.TotalHits, "See the issue: LUCENENET-174");
}
示例11: button5_Click
/// <summary>
/// UI handler: tokenizes the contents of c:\1.txt with the DiacriticAnalyzer,
/// dumps each token into textBox1, then indexes the text in memory and shows
/// the hit count for a single Arabic query term in a message box.
/// </summary>
private void button5_Click(object sender, EventArgs e)
{
    FilterData.PrepareCharMap();
    textBox1.Clear();

    var analyzer = new DiacriticAnalyzer(FilterData.stopWords);
    string fileContents = File.ReadAllText("c:\\1.txt");

    // Append every token produced by the analyzer to the text box.
    TokenStream tokenStream = analyzer.TokenStream(new StringReader(fileContents));
    Token token;
    while ((token = tokenStream.Next()) != null)
    {
        textBox1.AppendText(token.TermText() + Environment.NewLine);
    }

    // Index the same text and run one query against it.
    var directory = new Store.RAMDirectory();
    var indexer = new IndexWriter(directory, analyzer, true);
    var document = new Lucene.Net.Documents.Document();
    document.Add(Documents.Field.Text("contents", fileContents));
    indexer.AddDocument(document);
    indexer.Close();

    var searcher = new IndexSearcher(directory);
    Hits hits = searcher.Search(QueryParser.Parse("انعمت", "contents", analyzer));
    MessageBox.Show(hits.Length().ToString());
    searcher.Close();
    directory.Close();
}
示例12: TestIndexReload
/// <summary>
/// Repeatedly reopens an index while appending duplicated batches of test
/// documents, verifying on every iteration that facet hit counts for
/// color:red (3 per batch), tag:dog (2 per batch) and tag:funny (3 per batch)
/// grow in lockstep with the number of batches written so far.
/// </summary>
public void TestIndexReload()
{
    // FIX: the original wrapped the whole body in
    //   catch (Exception e) { Assert.Fail(e.Message); }
    // which discards the stack trace and the exception type. An unhandled
    // exception fails the test just the same, with full diagnostics, so the
    // try/catch is removed.
    RAMDirectory idxDir = new RAMDirectory();
    Document[] docs = BoboTestCase.BuildData();
    BoboIndexReader.WorkArea workArea = new BoboIndexReader.WorkArea();
    BrowseRequest req;
    BrowseSelection sel;
    BoboBrowser browser;
    BrowseResult result;

    // Create an empty index so the first iteration expects 0 hits.
    IndexWriter writer = new IndexWriter(idxDir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), true, IndexWriter.MaxFieldLength.UNLIMITED);
    writer.Close();

    int dup = 0; // number of document batches written so far
    for (int j = 0; j < 50; j++)
    {
        IndexReader idxReader = IndexReader.Open(idxDir, true);
        BoboIndexReader reader = BoboIndexReader.GetInstance(idxReader, _fconf, workArea);

        // color:red matches 3 documents per batch.
        req = new BrowseRequest();
        req.Offset = 0;
        req.Count = 10;
        sel = new BrowseSelection("color");
        sel.AddValue("red");
        req.AddSelection(sel);
        browser = new BoboBrowser(reader);
        result = browser.Browse(req);
        Assert.AreEqual(3 * dup, result.NumHits);

        // tag:dog matches 2 documents per batch.
        req = new BrowseRequest();
        req.Offset = 0;
        req.Count = 10;
        sel = new BrowseSelection("tag");
        sel.AddValue("dog");
        req.AddSelection(sel);
        browser = new BoboBrowser(reader);
        result = browser.Browse(req);
        Assert.AreEqual(2 * dup, result.NumHits);

        // tag:funny matches 3 documents per batch.
        req = new BrowseRequest();
        req.Offset = 0;
        req.Count = 10;
        sel = new BrowseSelection("tag");
        sel.AddValue("funny");
        req.AddSelection(sel);
        browser = new BoboBrowser(reader);
        result = browser.Browse(req);
        Assert.AreEqual(3 * dup, result.NumHits);

        // Append j+1 more batches, so the doc count grows each outer iteration.
        writer = new IndexWriter(idxDir, new StandardAnalyzer(Lucene.Net.Util.Version.LUCENE_CURRENT), false, IndexWriter.MaxFieldLength.UNLIMITED);
        for (int k = 0; k <= j; k++)
        {
            for (int i = 0; i < docs.Length; i++)
            {
                writer.AddDocument(docs[i]);
            }
            dup++;
        }
        writer.Close();
    }
    idxDir.Close();
}