import os
from whoosh.fields import *
from whoosh.index import create_in, open_dir
from models.distribusi_model import Distribusis
from models.distribusi_file_model import DistribusiFiles
import flask_apscheduler

SCRIPT_DIR = os.path.dirname(__file__)
SEARCH_DATA_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, "searchdata"))


def _search_schema():
    """Return the Whoosh schema for the distribusi search index.

    ``title`` and ``path`` are stored so they can be shown in results;
    ``content`` is indexed for full-text search only.
    """
    return Schema(
        title=TEXT(stored=True),
        path=ID(stored=True),
        content=TEXT,
    )


def _rebuild_index(APP):
    """Create a fresh index in SEARCH_DATA_DIR and populate it.

    ``create_in`` replaces any existing index, so rebuilding from scratch
    keeps the index in sync with the database and avoids accumulating
    duplicate documents — Whoosh's ``add_document`` has no dedup semantics.

    :param APP: the Flask application (provides app context for queries).
    """
    ix = create_in(SEARCH_DATA_DIR, _search_schema())
    writer = ix.writer()
    index_distribusis(APP, writer)
    index_distribusi_files(APP, writer)
    writer.commit(optimize=True)


def init_search_index(APP):
    """Build the search index and schedule an hourly rebuild.

    Starts a Flask-APScheduler instance bound to *APP*, performs an initial
    full index build, and registers a background task that rebuilds the
    index every 60 minutes.

    :param APP: the Flask application.
    """
    scheduler = flask_apscheduler.APScheduler()
    scheduler.api_enabled = False
    scheduler.init_app(APP)
    scheduler.start()

    # create_in() raises if the target directory is missing; make sure it
    # exists before the first build.
    os.makedirs(SEARCH_DATA_DIR, exist_ok=True)
    _rebuild_index(APP)

    @scheduler.task("interval", id="update", minutes=60)
    def update_search_index():
        # Rebuild from scratch. The previous implementation opened the
        # existing index (open_dir) and re-added every document, which
        # duplicated the entire corpus on every hourly run.
        _rebuild_index(APP)


def index_distribusis(APP, writer):
    """Add every visible distribusi to the index via *writer*.

    :param APP: the Flask application (app context for the DB query).
    :param writer: an open Whoosh ``IndexWriter``; caller commits.
    """
    for distribusi in _visible_distribusis(APP):
        writer.add_document(
            title=distribusi.distribusiname,
            path="/a",
            content=distribusi.description,
        )


def index_distribusi_files(APP, writer):
    """Add every distribusi file to the index via *writer*.

    :param APP: the Flask application (app context for the DB query).
    :param writer: an open Whoosh ``IndexWriter``; caller commits.
    """
    with APP.app_context():
        for distribusi_file in DistribusiFiles.query.all():
            APP.logger.info(
                f"adding distribusi file {distribusi_file.path} to search index"
            )
            writer.add_document(
                title=distribusi_file.path,
                path="/b",
                content=distribusi_file.description,
            )


def _visible_distribusis(APP):
    """Return all Distribusis whose ``visible`` flag is not False.

    ``isnot(False)`` also matches rows where ``visible`` is NULL, so only
    explicitly-hidden distribusis are excluded.
    """
    with APP.app_context():
        return Distribusis.query.filter(
            Distribusis.visible.isnot(False)
        ).all()