search as part of flask
parent ff7189af66
commit 52b513bc2a
@@ -2,6 +2,7 @@


import datetime
+import json
import os

import bcrypt
@@ -19,6 +20,7 @@ from csvparser.csvparser import (editborrowedby, getfullpublication,
                                 getlicenses, getpublications, gettypes,
                                 getyears, writepublication)
from rnrfeed.rnrfeeder import getevents, getlatestevent
+from search import search
from uploadform import PublicationForm

APP = create_app()
@@ -87,9 +89,11 @@ def show_book(publicationID):
    )


-@APP.route("/search", methods=["GET"])
-def searchbooks():
-    return
+@APP.route("/search/<search_query>", methods=["GET"])
+def searchbooks(search_query):
+    print(f"Searched for {search_query}")
+    search_results = search(search_query)
+    return json.dumps(search_results)


@APP.route("/pastevents")
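As a quick check of the new route, a minimal sketch using the requests library; the host, port, and example query are assumptions, not part of this commit:

import requests

# Assumed local development server; adjust host/port to your setup.
BASE_URL = "http://localhost:5000"

# GET /search/<search_query> returns a JSON-encoded list of book ids.
response = requests.get(f"{BASE_URL}/search/poetry", timeout=10)
book_ids = response.json()
print(book_ids)  # a list of id strings, i.e. the "Id" column values from the CSV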
library/search.py (new file, 22 lines added)
@@ -0,0 +1,22 @@
+import os
+
+from whoosh.fields import *
+from whoosh.index import open_dir
+from whoosh.qparser import QueryParser
+
+from csvparser.csvparser import getfullpublication
+
+SCRIPT_DIR = os.path.dirname(__file__)
+DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "data"))
+
+
+def search(searchinput):
+    """search and get search result titles and return them as book ids"""
+    ix = open_dir(DATA_DIR)
+    with ix.searcher() as searcher:
+        query = QueryParser("content", ix.schema).parse(searchinput)
+        search_results = searcher.search(query)
+        searched_book_ids = []
+        for book in search_results:
+            searched_book_ids.append(book["title"])
+        return searched_book_ids
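search() assumes a Whoosh index already exists in DATA_DIR. A minimal sketch of building one, reusing the Schema from the standalone indexer removed below; the example document is made up:

import os

from whoosh.fields import Schema, TEXT, ID
from whoosh.index import create_in

SCRIPT_DIR = os.path.dirname(__file__)
DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "data"))

# Same field layout as the removed indexer: the book id is stored in "title",
# the searchable row text in "content".
schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)

os.makedirs(DATA_DIR, exist_ok=True)
ix = create_in(DATA_DIR, schema)
writer = ix.writer()
writer.add_document(title="1", path="/a", content="Example Author Example Publication")
writer.commit()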
@@ -1,56 +0,0 @@
-from whoosh.index import create_in
-from whoosh.fields import *
-from whoosh.qparser import QueryParser
-
-import csv
-import os
-import argparse
-from csvparser.csvparser import getfullpublication
-
-SCRIPT_DIR = os.path.dirname(__file__)
-DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "data"))
-
-
-def index_csv_file():
-    filename = os.path.join(DATA_DIR, "varlib.csv")
-    with open(filename, 'r', encoding='utf_8_sig') as libcsv:
-        csv_as_dict = csv.DictReader(libcsv)
-        for row in csv_as_dict:
-            rowcontent = concatenate_csv_row(row)
-            writer.add_document(title=row["Id"], path=u"/a", content=rowcontent)
-    writer.commit()
-
-def search(searchinput):
-    with ix.searcher() as searcher:
-        query = QueryParser("content", ix.schema).parse(searchinput)
-        results = searcher.search(query)
-        bookid = results[0]['title']
-        for book in results:
-            bookid = book['title']
-            print(f"result found: {bookid}")
-        publication = getfullpublication(bookid)
-        print(f"{publication['Author']} - {publication['Title']}")
-
-
-def concatenate_csv_row(row):
-    rowcontent = []
-    rowcontent.append(row["Publication"])
-    rowcontent.append(row["Author"])
-    rowcontent.append(row["Fields"])
-    rowcontent.append(row["Type"])
-    rowcontent.append(row["Publishers"])
-    rowcontent.append(row["Highlights"])
-    rowcontent.append(row["Comments"])
-    return ' '.join(rowcontent)
-
-parser = argparse.ArgumentParser()
-parser.add_argument("-s", "--search", type=str)
-args = parser.parse_args()
-searchinput = args.search
-
-schema = Schema(title=TEXT(stored=True), path=ID(stored=True), content=TEXT)
-ix = create_in(DATA_DIR, schema)
-writer = ix.writer()
-index_csv_file()
-print(searchinput)
-search(searchinput)
@@ -1,4 +1,3 @@
// Filter section ===================== old school code divider ================

filterSelection("all", "None");
function filterSelection(c, id) {
@@ -29,11 +28,14 @@ function resetDropDownButtons(){
    document.getElementById("License").innerText = "License";
    document.getElementById("PubType").innerText = "Type";
    document.getElementById("Year").innerText = "Year";
    document.getElementById('booksearch').value = "";
    document.getElementById('booksearch').placeholder = "🔍 Search..";
    allactivebuttons = document.getElementsByClassName("activebtn");
    for (var i = 0; i < allactivebuttons.length; i++) {
        removeClass(allactivebuttons[i], "activebtn");
    }
}

function addClass(element, name) {
    var i, arr1, arr2;
    arr1 = element.className.split(" ");
@@ -1,23 +1,42 @@
let searchInput = document.getElementById('booksearch');
let timeout = null;
// Listen for keystroke events
searchInput.addEventListener('keyup', function (e) {
    // Clear the timeout if it has already been set.
    clearTimeout(timeout);
    // Make a new timeout set to go off in 1000ms (1 second)
    timeout = setTimeout(function () {
        if (searchInput.value.length > 2) {
            searchTags(searchInput.value);
        } else {
            clearSearchTags();
        }
    }, 1000);
});
var allpublications = document.getElementsByClassName("filter");

function searchTags(searchInput) {
    console.log(searchInput);
const ENTER_KEY_CODE = 13;

searchInput.addEventListener('keyup', function(e) {
    if (e.keyCode === ENTER_KEY_CODE) {
        if (searchInput.value.length > 2) {
            searchBooks(searchInput.value);
        } else {
            clearSearchBooks();
        }
    }
})

function searchBooks(searchQuery) {
    let searchUrl = `search/${searchQuery}`
    fetch(searchUrl)
        .then(response => response.json())
        .then(searchdata => {
            console.log(`book ids: ${searchdata} found for ${searchQuery}`);
            if (searchdata === undefined || searchdata.length == 0) return;
            for (i = 0; i < allpublications.length; i++) {
                removeClass(allpublications[i], "show");
            }
            searchdata.forEach(bookid => {
                showBookId(bookid)
            });
        })
}

function clearSearchTags() {
    console.log("stop search");
function showBookId(bookid) {
    let book = document.getElementById(bookid)
    addClass(book, "show");
}


function clearSearchBooks() {
    for (i = 0; i < allpublications.length; i++) {
        addClass(allpublications[i], "show");
    }
}
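The fetch handler above relies on /search/<query> returning a JSON array of book ids that match the id attributes rendered by the template below. A minimal sketch of checking that contract with Flask's test client; the module name app for the file that defines APP is an assumption:

import json

from app import APP  # assumed import path; adjust to the module where APP = create_app() lives

with APP.test_client() as client:
    response = client.get("/search/poetry")
    book_ids = json.loads(response.data)
    # Each returned id should match a <div id="..."> element so that
    # showBookId() can add the "show" class to it.
    print(book_ids)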
@@ -5,7 +5,7 @@
</div>
<div id="bookshelf">
{% for id, pubinfo in publications.items() %}
-<div class='book filter {{ pubinfo["Type"] }} {{ pubinfo["Year"] }} {{ pubinfo["License"] }}'>
+<div id="{{ id }}" class='book filter {{ pubinfo["Type"] }} {{ pubinfo["Year"] }} {{ pubinfo["License"] }}'>
<a href='{{ id }}'>
<table>
<tbody>