CR: Adding trailing lines and removing some trailing spaces.

parent 6dc2b943b4
commit ccd8021ff9

src/index.rs
@@ -144,7 +144,6 @@ impl Index {
         };

         let reader = index.reader().map_err(to_pyerr)?;
-        println!("reader {}", reader.searcher().segment_readers().len());
         obj.init(Index { index, reader });
         Ok(())
     }
@@ -215,6 +214,16 @@ impl Index {
         Ok(())
     }

+    /// Acquires a Searcher from the searcher pool.
+    ///
+    /// If no searcher is available during the call, note that
+    /// this call will block until one is made available.
+    ///
+    /// Searcher are automatically released back into the pool when
+    /// they are dropped. If you observe this function to block forever
+    /// you probably should configure the Index to have a larger
+    /// searcher pool, or you are holding references to previous searcher
+    /// for ever.
     fn searcher(&self) -> Searcher {
         Searcher {
             inner: self.reader.searcher(),
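For reference, a rough sketch of how this pooled searcher is used from the Python side, built only from calls that appear in the tests further down (field names and values here are illustrative):

    from tantivy import Document, Index, SchemaBuilder

    # Small in-memory index, same pattern as the ram_index fixture below.
    schema = SchemaBuilder() \
        .add_text_field("title", stored=True) \
        .build()
    index = Index(schema)

    writer = index.writer()
    writer.add_document(Document(title="some title"))
    writer.commit()
    index.reload()

    # searcher() checks a Searcher out of the reader's pool; per the doc
    # comment above it goes back into the pool once it is dropped.
    searcher = index.searcher()
    assert searcher.num_docs == 1
    print(repr(searcher))  # e.g. Searcher(num_docs=1, num_segments=1)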
@@ -1 +1 @@
 from .tantivy import *
@@ -3,57 +3,56 @@ import pytest

 from tantivy import Document, Index, SchemaBuilder, Schema


 def schema():
-    return SchemaBuilder()\
-        .add_text_field("title", stored=True)\
-        .add_text_field("body")\
+    return SchemaBuilder() \
+        .add_text_field("title", stored=True) \
+        .add_text_field("body") \
         .build()


 @pytest.fixture(scope="class")
 def ram_index():
     # assume all tests will use the same documents for now
     # other methods may set up function-local indexes
     index = Index(schema())
     writer = index.writer()

     # 2 ways of adding documents
     # 1
     doc = Document()
     # create a document instance
     # add field-value pairs
     doc.add_text("title", "The Old Man and the Sea")
     doc.add_text("body", ("He was an old man who fished alone in a skiff in"
                           "the Gulf Stream and he had gone eighty-four days "
                           "now without taking a fish."))
     writer.add_document(doc)
     # 2 use the built-in json support
     # keys need to coincide with field names
     doc = Document.from_dict({
         "title": "Of Mice and Men",
         "body": ("A few miles south of Soledad, the Salinas River drops "
                  "in close to the hillside bank and runs deep and "
                  "green. The water is warm too, for it has slipped "
                  "twinkling over the yellow sands in the sunlight "
                  "before reaching the narrow pool. On one side of the "
                  "river the golden foothill slopes curve up to the "
                  "strong and rocky Gabilan Mountains, but on the valley "
                  "side the water is lined with trees—willows fresh and "
                  "green with every spring, carrying in their lower leaf "
                  "junctures the debris of the winter’s flooding; and "
                  "sycamores with mottled, white, recumbent limbs and "
                  "branches that arch over the pool")
     })
     writer.add_document(doc)
     writer.add_json("""{
         "title": ["Frankenstein", "The Modern Prometheus"],
         "body": "You will rejoice to hear that no disaster has accompanied the commencement of an enterprise which you have regarded with such evil forebodings. I arrived here yesterday, and my first task is to assure my dear sister of my welfare and increasing confidence in the success of my undertaking."
     }""")
     writer.commit()
     index.reload()
     return index


 class TestClass(object):
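The fixture above demonstrates the document-ingestion paths; condensed, the three call shapes look like this (throwaway values, schema built as in schema() above):

    from tantivy import Document, Index, SchemaBuilder

    schema = SchemaBuilder() \
        .add_text_field("title", stored=True) \
        .add_text_field("body") \
        .build()
    index = Index(schema)
    writer = index.writer()

    # 1. Build a Document field by field.
    doc = Document()
    doc.add_text("title", "A Title")
    doc.add_text("body", "Some body text.")
    writer.add_document(doc)

    # 2. Build a Document from a dict; keys need to coincide with field names.
    writer.add_document(Document.from_dict({"title": "Another Title",
                                            "body": "More body text."}))

    # 3. Hand the writer a raw JSON string.
    writer.add_json('{"title": "A Third Title", "body": "Even more text."}')

    # Documents become searchable only after commit() and, with the default
    # manual reload policy, an explicit reload().
    writer.commit()
    index.reload()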
@@ -86,7 +85,6 @@ class TestClass(object):

         assert len(result) == 1

-
     def test_and_query_parser_default_fields(self, ram_index):
         query = ram_index.parse_query("winter", default_field_names=["title"])
         assert repr(query) == """Query(TermQuery(Term(field=0,bytes=[119, 105, 110, 116, 101, 114])))"""
@@ -98,7 +96,6 @@ class TestClass(object):
            "(Should, TermQuery(Term(field=1,bytes=[119, 105, 110, 116, 101, 114])))] " \
            "})"

-
     def test_query_errors(self, ram_index):
         index = ram_index
         # no "bod" field
@@ -106,9 +103,9 @@ class TestClass(object):
             index.parse_query("bod:men", ["title", "body"])



 PATH_TO_INDEX = "tests/test_index/"


 class TestFromDiskClass(object):

     def test_exists(self):
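A short sketch of the query-parsing calls these tests exercise, assuming index is an Index populated like the ram_index fixture (the field numbers in the printed repr depend on the schema):

    # Parse against an explicit list of default fields.
    query = index.parse_query("winter", default_field_names=["title"])
    print(repr(query))  # e.g. Query(TermQuery(Term(field=0,bytes=[...])))

    # With several default fields the parser produces Should clauses over
    # each of them (see the repr asserted in the hunk above).
    query = index.parse_query("winter", default_field_names=["title", "body"])

    # A query referencing a field that is not in the schema is rejected;
    # test_query_errors above asserts the exact exception.
    try:
        index.parse_query("bod:men", ["title", "body"])  # no "bod" field
    except Exception as err:
        print(err)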
@@ -123,29 +120,30 @@ class TestFromDiskClass(object):
     def test_create_readers(self):
         # not sure what is the point of this test.
         idx = Index(schema())
         assert idx.searcher().num_docs == 0
         # by default this is manual mode
         writer = idx.writer(30000000, 1)
-        writer.add_document(Document(title="mytitle",body="mybody"))
+        writer.add_document(Document(title="mytitle", body="mybody"))
         writer.commit()
         assert idx.searcher().num_docs == 0
         # Manual is the default setting.
         # In this case, change are reflected only when
         # the index is manually reloaded.
         idx.reload()
         assert idx.searcher().num_docs == 1
         idx.config_reader("OnCommit", 4)
-        writer.add_document(Document(title="mytitle2",body="mybody2"))
+        writer.add_document(Document(title="mytitle2", body="mybody2"))
         writer.commit()
         import time
         for i in range(50):
             # The index should be automatically reloaded.
             # Wait for at most 5s for it to happen.
             time.sleep(0.1)
             if idx.searcher().num_docs == 2:
                 return
         assert False
+

 class TestSearcher(object):
     def test_searcher_repr(self, ram_index):
         assert repr(ram_index.searcher()) == "Searcher(num_docs=3, num_segments=1)"
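test_create_readers above boils down to the two reader reload policies. A compact sketch of the same flow, reusing only the calls and argument values that appear in that test:

    import time

    from tantivy import Document, Index, SchemaBuilder

    schema = SchemaBuilder() \
        .add_text_field("title", stored=True) \
        .add_text_field("body") \
        .build()
    idx = Index(schema)
    writer = idx.writer(30000000, 1)  # writer arguments copied from the test

    # Default policy is manual: a commit is not visible until reload().
    writer.add_document(Document(title="mytitle", body="mybody"))
    writer.commit()
    assert idx.searcher().num_docs == 0
    idx.reload()
    assert idx.searcher().num_docs == 1

    # Reconfigure the reader to reload on commit; the next commit shows up
    # after a short delay, without an explicit reload().
    idx.config_reader("OnCommit", 4)  # values copied verbatim from the test
    writer.add_document(Document(title="mytitle2", body="mybody2"))
    writer.commit()
    for _ in range(50):
        time.sleep(0.1)
        if idx.searcher().num_docs == 2:
            break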
@@ -187,4 +185,4 @@ class TestDocument(object):

     def test_document_error(self):
         with pytest.raises(ValueError):
             tantivy.Document(name={})