#!/usr/bin/env python3
# Bookmark Archiver
# Nick Sweeting 2017 | MIT License
# https://github.com/pirate/bookmark-archiver
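# Example usage (same invocation as the help text printed by print_help below):
#     ./archive.py ~/Downloads/bookmarks_export.html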
|
2017-05-05 05:00:30 -04:00
|
|
|
|
2017-10-30 07:09:33 -04:00
|
|
|
import os
|
2017-05-05 05:00:30 -04:00
|
|
|
import sys
|
2017-05-05 19:36:46 -04:00
|
|
|
|
2017-06-30 04:23:19 -04:00
|
|
|
from datetime import datetime
|
2017-05-29 14:00:46 -04:00
|
|
|
|
2017-10-23 05:56:21 -04:00
|
|
|
from parse import parse_links
|
2017-10-18 18:38:17 -04:00
|
|
|
from links import validate_links
|
|
|
|
from archive_methods import archive_links, _RESULTS_TOTALS
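# (archive_links performs the per-link archiving; _RESULTS_TOTALS tallies its skipped/succeded/failed counts)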
from index import (
    write_links_index,
    write_link_index,
    parse_json_links_index,
    parse_json_link_index,
)
from config import (
    ARCHIVE_PERMISSIONS,
    HTML_FOLDER,
    ANSI,
    TIMEOUT,
)
from util import (
    download_url,
    check_dependencies,
    progress,
    cleanup_archive,
)

__DESCRIPTION__ = 'Bookmark Archiver: Create a browsable html archive of a list of links.'
__DOCUMENTATION__ = 'https://github.com/pirate/bookmark-archiver'

def print_help():
    print(__DESCRIPTION__)
    print("Documentation: {}\n".format(__DOCUMENTATION__))
    print("Usage:")
    print("    ./archive.py ~/Downloads/bookmarks_export.html\n")


def get_links(new_links_file_path, archive_path=HTML_FOLDER):
    """get new links from file and optionally append them to links in existing archive"""

    # parse and validate the new_links_file
    raw_links = parse_links(new_links_file_path)
    valid_links = validate_links(raw_links)

    # merge existing links in archive_path and new links
    existing_links = []
    if archive_path:
        existing_links = parse_json_links_index(archive_path)
        valid_links = validate_links(existing_links + valid_links)

    num_new_links = len(valid_links) - len(existing_links)
    print('[*] [{}] Adding {} new links from {} to index'.format(
        datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        num_new_links,
        new_links_file_path,
    ))

    return valid_links


def update_archive(archive_path, links, source=None, resume=None, append=True):
    """run the archive methods for each link, then print a timing & results summary"""
    start_ts = datetime.now().timestamp()

    # loop over links and archive them
    archive_links(archive_path, links, source=source, resume=resume)

    # print timing information & summary
    end_ts = datetime.now().timestamp()
    seconds = end_ts - start_ts
    if seconds > 60:
        duration = '{0:.2f} min'.format(seconds / 60)
    else:
        duration = '{0:.2f} sec'.format(seconds)

    print('{}[√] [{}] Archive update complete ({}){}'.format(
        ANSI['green'],
        datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        duration,
        ANSI['reset'],
    ))
    print('    - {} entries skipped'.format(_RESULTS_TOTALS['skipped']))
    print('    - {} entries updated'.format(_RESULTS_TOTALS['succeded']))
    print('    - {} errors'.format(_RESULTS_TOTALS['failed']))
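    # ('succeded' above matches the key spelling defined in the _RESULTS_TOTALS dict)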


if __name__ == '__main__':
    argc = len(sys.argv)

    if argc < 2 or set(sys.argv).intersection({'-h', '--help', 'help'}):
        print_help()
        raise SystemExit(0)

    source = sys.argv[1]                        # path to export file
    resume = sys.argv[2] if argc > 2 else None  # timestamp to resume downloading from

    # See if an archive folder already exists from a previous run; fall back to HTML_FOLDER if none is found
    for out_folder in (HTML_FOLDER, 'bookmarks', 'pocket', 'pinboard', 'html'):
        if os.path.exists(out_folder):
            break
    else:
        out_folder = HTML_FOLDER

    archive_path = os.path.join(out_folder, 'archive')

    # Step 0: Download url to local file (only happens if a URL is specified instead of local path)
    if any(source.startswith(s) for s in ('http://', 'https://', 'ftp://')):
        source = download_url(source)

    # Step 1: Parse the links and dedupe them with existing archive
    links = get_links(source, archive_path=archive_path)

    # Step 2: Write new index
    write_links_index(archive_path, links)

    # Step 3: Verify folder structure is 1:1 with index
    cleanup_archive(archive_path, links)

    # Step 4: Run the archive methods for each link
    update_archive(archive_path, links, source=source, resume=resume, append=True)