crunk
1 year ago
6 changed files with 70 additions and 79 deletions
@ -0,0 +1,22 @@ |
|||
import os |
|||
|
|||
from whoosh.fields import * |
|||
from whoosh.index import open_dir |
|||
from whoosh.qparser import QueryParser |
|||
|
|||
from csvparser.csvparser import getfullpublication |
|||
|
|||
SCRIPT_DIR = os.path.dirname(__file__) |
|||
DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "data")) |
|||
|
|||
|
|||
def search(searchinput):
    """Search the Whoosh index and return the matching book ids.

    Args:
        searchinput: Raw query string typed by the user.

    Returns:
        List of book ids (the stored ``title`` field of each hit);
        empty list when nothing matches.
    """
    ix = open_dir(DATA_DIR)
    with ix.searcher() as searcher:
        query = QueryParser("content", ix.schema).parse(searchinput)
        search_results = searcher.search(query)
        # Stored fields must be read while the searcher is still open,
        # so materialize the id list inside the ``with`` block.
        return [book["title"] for book in search_results]
@ -1,56 +0,0 @@ |
|||
from whoosh.index import create_in |
|||
from whoosh.fields import * |
|||
from whoosh.qparser import QueryParser |
|||
|
|||
import csv |
|||
import os |
|||
import argparse |
|||
from csvparser.csvparser import getfullpublication |
|||
|
|||
SCRIPT_DIR = os.path.dirname(__file__) |
|||
DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "data")) |
|||
|
|||
|
|||
def index_csv_file():
    """Read varlib.csv and feed every row into the module-level index writer."""
    csv_path = os.path.join(DATA_DIR, "varlib.csv")
    # utf_8_sig strips the BOM that spreadsheet exports tend to prepend.
    with open(csv_path, 'r', encoding='utf_8_sig') as libcsv:
        for record in csv.DictReader(libcsv):
            # NOTE: relies on the module-level ``writer`` global being set up
            # before this function is called.
            writer.add_document(
                title=record["Id"],
                path=u"/a",
                content=concatenate_csv_row(record),
            )
        writer.commit()
|||
|
|||
def search(searchinput):
    """Query the index and print author/title for every matching book.

    Args:
        searchinput: Raw query string from the command line.
    """
    with ix.searcher() as searcher:
        query = QueryParser("content", ix.schema).parse(searchinput)
        results = searcher.search(query)
        # Iterate directly over the hits: the old ``results[0]['title']``
        # peek raised IndexError whenever the search came back empty and
        # was immediately overwritten by the loop anyway.
        for book in results:
            bookid = book['title']
            print(f"result found: {bookid}")
            publication = getfullpublication(bookid)
            print(f"{publication['Author']} - {publication['Title']}")
|||
|
|||
|
|||
def concatenate_csv_row(row):
    """Join the searchable columns of one CSV row into a single string.

    Args:
        row: Mapping of column name to cell text (e.g. a ``csv.DictReader``
            row).

    Returns:
        The selected column values joined by single spaces, in a fixed
        order so the indexed text is deterministic.

    Raises:
        KeyError: If any expected column is missing from ``row``.
    """
    searchable_columns = (
        "Publication",
        "Author",
        "Fields",
        "Type",
        "Publishers",
        "Highlights",
        "Comments",
    )
    return ' '.join(row[column] for column in searchable_columns)
|||
|
|||
# --- Command-line interface --------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--search", type=str)  # query string; None when omitted
args = parser.parse_args()
searchinput = args.search

# --- Build the index from scratch on every run -------------------------------
# ``title`` stores the book id, ``content`` the concatenated searchable text.
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)
# NOTE(review): create_in replaces any existing index in DATA_DIR — the whole
# CSV is re-indexed on every invocation.
ix = create_in(DATA_DIR, schema)
# Module-level global consumed by index_csv_file(); must exist before the call.
writer = ix.writer()
index_csv_file()
print(searchinput)
search(searchinput)
@ -1,23 +1,42 @@ |
|||
let searchInput = document.getElementById('booksearch');
let timeout = null;

// Debounce keystrokes: only search after 1s of typing silence.
searchInput.addEventListener('keyup', function (e) {
    // Every keystroke restarts the countdown.
    clearTimeout(timeout);
    timeout = setTimeout(function () {
        const query = searchInput.value;
        if (query.length > 2) {
            searchTags(query);
        } else {
            clearSearchTags();
        }
    }, 1000);
});
|||
// Live collection of every publication element on the page; entries are
// shown/hidden by toggling the "show" class on them.
var allpublications = document.getElementsByClassName("filter");
|||
function searchTags(searchInput) { |
|||
console.log(searchInput); |
|||
const ENTER_KEY_CODE = 13;

// Search on Enter; queries of 3+ characters trigger a search, shorter
// input clears any active filter instead.
searchInput.addEventListener('keyup', function (e) {
    // e.keyCode is deprecated — prefer e.key, keep the numeric check as a
    // fallback for older browsers.
    if (e.key === 'Enter' || e.keyCode === ENTER_KEY_CODE) {
        if (searchInput.value.length > 2) {
            searchBooks(searchInput.value);
        } else {
            clearSearchBooks();
        }
    }
});
|||
|
|||
// Fetch matching book ids from the backend and show only those entries.
function searchBooks(searchQuery) {
    let searchUrl = `search/${searchQuery}`;
    fetch(searchUrl)
        .then(response => response.json())
        .then(searchdata => {
            console.log(`book ids: ${searchdata} found for ${searchQuery}`);
            // No hits: leave the current view untouched.
            if (searchdata === undefined || searchdata.length == 0) return;
            // Hide everything first, then reveal only the matches.
            // ``let i`` — the original loop index was an implicit global.
            for (let i = 0; i < allpublications.length; i++) {
                removeClass(allpublications[i], "show");
            }
            searchdata.forEach(bookid => {
                showBookId(bookid);
            });
        })
        // Surface network/JSON failures instead of silently rejecting.
        .catch(err => console.error(`search failed for ${searchQuery}:`, err));
}
|||
|
|||
function clearSearchTags() { |
|||
console.log("stop search"); |
|||
// Reveal the publication whose DOM id matches the given book id.
function showBookId(bookid) {
    let book = document.getElementById(bookid);
    if (book === null) {
        // The index may return ids that are not rendered on this page;
        // skip them instead of letting addClass throw on null.
        console.warn(`no element found for book id ${bookid}`);
        return;
    }
    addClass(book, "show");
}
|||
|
|||
|
|||
// Reset the filter: make every publication visible again.
function clearSearchBooks() {
    // ``let i`` — the original loop index leaked as an implicit global,
    // which throws in strict mode.
    for (let i = 0; i < allpublications.length; i++) {
        addClass(allpublications[i], "show");
    }
}
|||
|
Loading…
Reference in new issue