add search box to ui and simple search backend
This commit is contained in:
parent
b00498dc42
commit
856db77ab0
|
@ -280,7 +280,7 @@ def dump_index(links, service):
|
|||
link_html = """\
|
||||
<tr>
|
||||
<td>{time}</td>
|
||||
<td><a href="archive/{timestamp}/{base_url}" style="font-size:1.4em;text-decoration:none;color:black;" title="{title}">
|
||||
<td><a class="base-url" href="archive/{timestamp}/{base_url}" style="font-size:1.4em;text-decoration:none;color:black;" title="{title}">
|
||||
<img src="archive/{timestamp}/favicon.ico">
|
||||
{title} <small style="background-color: #eee;border-radius:4px; float:right">{tags}</small>
|
||||
</td>
|
||||
|
@ -408,7 +408,7 @@ def create_archive(export_file, service=None, resume=None):
|
|||
if __name__ == '__main__':
|
||||
argc = len(sys.argv)
|
||||
export_file = sys.argv[1] if argc > 1 else "ril_export.html" # path to export file
|
||||
export_type = sys.argv[2] if argc > 2 else None # select export_type for file format select
|
||||
export_type = sys.argv[2] if argc > 2 else None # select export_type for file format select
|
||||
resume_from = sys.argv[3] if argc > 3 else None # timestamp to resume dowloading from
|
||||
|
||||
create_archive(export_file, export_type, resume=resume_from)
|
||||
|
|
|
@ -60,10 +60,20 @@
|
|||
padding-right: 5px;
|
||||
text-indent: -10000px;
|
||||
}}
|
||||
table tbody tr.matched {{
|
||||
background-color: yellow;
|
||||
}}
|
||||
input {{
|
||||
position: absolute;
|
||||
top: 40px;
|
||||
right: 20px;
|
||||
border-radius: 4px;
|
||||
}}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<input type="search" results="5" autosave="searches" name="searches" placeholder="Search..." onSearch="onSearch(event)">
|
||||
<h1 title="Last modified {}">
|
||||
<img src="https://nicksweeting.com/images/archive.png" height="36px">
|
||||
Archived Sites <img src="https://getpocket.com/favicon.ico" height="36px"> <br/>
|
||||
|
@ -86,5 +96,67 @@
|
|||
</thead>
|
||||
<tbody>{}</tbody>
|
||||
</table>
|
||||
<script>
|
||||
// Query the search backend on port 8080 with `pattern` and pass the list of
// matching archive timestamps to `callback`. `exact` requests a verbatim
// (non-regex) match.
function search_request(pattern, exact, callback) {{
    var xmlhttp = new XMLHttpRequest();
    var host = document.location.hostname;
    var protocol = document.location.protocol;

    // Fixed: '&' was missing before 'exact=1', which glued it onto the
    // search value ("...search=fooexact=1").
    // NOTE(review): the Flask backend reads a `regex` query param, not
    // `exact` — confirm the server honors `exact=1` or send `regex=` instead.
    xmlhttp.open("GET", protocol + '//' + host + ':8080/search?search=' + encodeURIComponent(pattern) + (exact ? '&exact=1' : ''));
    xmlhttp.onreadystatechange = function() {{
        if (xmlhttp.readyState == XMLHttpRequest.DONE) {{
            if (xmlhttp.status == 200) {{
                if (xmlhttp.responseText && xmlhttp.responseText.length) {{
                    var results = xmlhttp.responseText.split('\n');
                    console.log('Response: ', results);
                    // Each result is a path like /<timestamp>/...; keep only
                    // the timestamp, which uniquely identifies a row.
                    results = results.map(function (href) {{
                        return href.split('/')[1] // timestamp as unique key
                    }})
                    // Fixed: previously called highlight_matches() directly,
                    // silently ignoring the `callback` argument.
                    callback(results);
                }}
            }} else {{
                console.log('Error: ' + xmlhttp.statusText)
                return '';
            }}
        }}
    }}
    xmlhttp.send();
}}
|
||||
|
||||
// Highlight the table rows whose archive timestamp appears in `results`;
// when `results` is empty, clear every existing highlight instead.
function highlight_matches(results) {{
    console.log('highlighting', results);
    if (!results || !results.length) {{
        // Nothing matched: strip the highlight class from any marked rows.
        var highlighted = document.querySelectorAll('tbody tr.matched');
        for (let row of highlighted) {{
            row.classList = [];
        }}
        return;
    }}
    var matched = new Set(results);
    // Every archived link carries class "base-url"; its href embeds the
    // timestamp as .../archive/<timestamp>/<url>.
    var links = document.querySelectorAll('tbody a.base-url');
    for (let link of links) {{
        var timestamp = link.href.split('/archive')[1].split('/')[1];
        var row = link.parentElement.parentElement;
        console.log(timestamp, matched.has(timestamp))
        row.classList = matched.has(timestamp) ? ['matched'] : [];
    }}
}}
|
||||
|
||||
// Search-box handler: run a backend search when there is query text,
// otherwise clear all row highlights.
function onSearch(event) {{
    var query = event.target.value;
    if (!query.length) {{
        highlight_matches([]);
        return;
    }}
    search_request(query, false, highlight_matches);
}}
|
||||
</script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
50
search.py
Normal file
50
search.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
import sys
|
||||
from subprocess import run, PIPE
|
||||
|
||||
ARCHIVE_PATH = 'bookmarks/archive'
|
||||
|
||||
|
||||
def search_archive(pattern, regex=False):
    """Search the archived pages with ag and yield matching paths.

    pattern: text to look for (treated as a regular expression when
             ``regex`` is truthy, otherwise matched literally).
    Returns a generator of decoded paths with the ARCHIVE_PATH prefix
    stripped, e.g. ``/<timestamp>/<url>``.
    """
    # -Q makes ag match literally (non-regex); -g prints matching file paths.
    flags = '-g' if regex else '-Qg'
    proc = run(['ag', flags, pattern, ARCHIVE_PATH], stdout=PIPE, stderr=PIPE, timeout=60)
    raw_paths = proc.stdout.splitlines()
    return (raw.decode().replace(ARCHIVE_PATH, '') for raw in raw_paths)
|
||||
|
||||
|
||||
def server(port=8080):
    """Serve a minimal HTTP ``/search`` endpoint backed by search_archive.

    port: TCP port to listen on; string values (e.g. straight from argv)
          are coerced to int before binding.
    Exits with status 1 if Flask is not installed.
    """
    try:
        from flask import Flask
        from flask import request
    except ImportError:
        print('[X] Please install Flask to use the search server: pip install Flask')
        raise SystemExit(1)

    app = Flask('Bookmark Archive')

    @app.route("/search", methods=['GET'])
    def search():
        # `regex` is truthy for any non-empty value (e.g. ?regex=1).
        pattern = request.args.get('search', '')
        use_regex = request.args.get('regex', '')
        return '\n'.join(search_archive(pattern, use_regex))

    @app.after_request
    def after_request(response):
        # Allow the static index page (served from another origin/port)
        # to call /search from the browser.
        response.headers.add('Access-Control-Allow-Origin', '*')
        response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
        response.headers.add('Access-Control-Allow-Methods', 'GET')
        return response

    # Fixed: callers pass the port as a string from sys.argv; Werkzeug
    # expects an int, so coerce here (int(8080) is a no-op for the default).
    app.run(port=int(port))
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # CLI entry point:
    #   search.py --server [port]      run the HTTP search server
    #   search.py [--exact] <pattern>  search from the command line
    argc = len(sys.argv)
    if '--server' in sys.argv:
        port = sys.argv[2] if argc > 2 else '8080'
        server(port)
    elif argc > 1:
        pattern = sys.argv[2] if argc > 2 else sys.argv[1]
        verbatim = argc > 2  # assumes only possible extra argument is --exact
        # Default (no --exact) treats the pattern as a regex.
        matches = search_archive(pattern, regex=not verbatim)
        print('\n'.join(matches))
    else:
        # Fixed: previously crashed with a bare IndexError on sys.argv[1]
        # when invoked with no arguments.
        print('Usage: search.py --server [port] | search.py [--exact] <pattern>')
        raise SystemExit(1)
|
Loading…
Reference in a new issue