2020-07-07 10:10:36 -04:00
|
|
|
import subprocess
|
2020-07-07 10:49:28 -04:00
|
|
|
import json
|
2020-07-07 10:10:36 -04:00
|
|
|
|
|
|
|
from .fixtures import *
|
|
|
|
|
2020-08-04 09:42:30 -04:00
|
|
|
def test_depth_flag_is_accepted(process, disable_extractors_dict):
    """The `archivebox add` command should recognize the --depth flag."""
    arg_process = subprocess.run(
        ["archivebox", "add", "http://127.0.0.1:8080/static/example.com.html", "--depth=0"],
        capture_output=True,
        env=disable_extractors_dict,
    )
    # argparse prints "unrecognized arguments: --depth" to stderr if the flag is unknown
    assert 'unrecognized arguments: --depth' not in arg_process.stderr.decode("utf-8")
|
|
|
|
|
2020-08-18 04:52:56 -04:00
|
|
|
|
2020-08-04 09:42:30 -04:00
|
|
|
def test_depth_flag_fails_if_it_is_not_0_or_1(process, disable_extractors_dict):
    """--depth only accepts the choices 0 or 1; any other value must be rejected.

    The original test duplicated the identical subprocess call and assertion
    for depths 5 and -1; looping over the invalid values keeps the two cases
    in lockstep and makes adding more invalid values trivial.
    """
    for depth in ("5", "-1"):
        arg_process = subprocess.run(
            ["archivebox", "add", f"--depth={depth}", "http://127.0.0.1:8080/static/example.com.html"],
            capture_output=True,
            env=disable_extractors_dict,
        )
        stderr = arg_process.stderr.decode("utf-8")
        # argparse emits "invalid choice" for values outside the allowed set
        assert 'invalid choice' in stderr, f"--depth={depth} was not rejected"
|
2020-07-07 10:49:28 -04:00
|
|
|
|
2020-08-18 04:52:56 -04:00
|
|
|
|
2020-08-04 09:42:30 -04:00
|
|
|
def test_depth_flag_0_crawls_only_the_arg_page(tmp_path, process, disable_extractors_dict):
    """With --depth=0 only the URL given on the command line should be archived."""
    subprocess.run(
        ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
        capture_output=True,
        env=disable_extractors_dict,
    )
    # exactly one snapshot directory is expected under archive/
    archived_item_path = list(tmp_path.glob('archive/**/*'))[0]
    output_json = json.loads((archived_item_path / "index.json").read_text())
    assert output_json["base_url"] == "127.0.0.1:8080/static/example.com.html"
|
2020-07-07 11:07:44 -04:00
|
|
|
|
2020-08-18 04:52:56 -04:00
|
|
|
|
2020-08-04 09:42:30 -04:00
|
|
|
def test_depth_flag_1_crawls_the_page_AND_links(tmp_path, process, disable_extractors_dict):
    """With --depth=1 both the given page and the pages it links to are archived."""
    subprocess.run(
        ["archivebox", "add", "--depth=1", "http://127.0.0.1:8080/static/example.com.html"],
        capture_output=True,
        env=disable_extractors_dict,
    )
    archive_file = (tmp_path / "index.json").read_text()
    # the arg page itself plus the page it links to must both be in the index
    for url in (
        "http://127.0.0.1:8080/static/example.com.html",
        "http://127.0.0.1:8080/static/iana.org.html",
    ):
        assert url in archive_file
|
2020-08-18 04:52:56 -04:00
|
|
|
|
|
|
|
|
|
|
|
def test_overwrite_flag_is_accepted(process, disable_extractors_dict):
    """Re-adding an existing URL with --overwrite should re-run the archive methods."""
    url = "http://127.0.0.1:8080/static/example.com.html"

    # first add creates the snapshot
    subprocess.run(
        ["archivebox", "add", "--depth=0", url],
        capture_output=True,
        env=disable_extractors_dict,
    )

    # second add with --overwrite should force the methods to run again
    arg_process = subprocess.run(
        ["archivebox", "add", "--overwrite", url],
        capture_output=True,
        env=disable_extractors_dict,
    )

    assert 'unrecognized arguments: --overwrite' not in arg_process.stderr.decode("utf-8")
    assert 'favicon' in arg_process.stdout.decode('utf-8'), 'archive methods probably didnt run, did overwrite work?'
|
2020-08-28 12:08:03 -04:00
|
|
|
|
|
|
|
def test_add_updates_history_json_index(tmp_path, process, disable_extractors_dict):
    """After an add, the link's entry in the main JSON index carries a non-empty history."""
    subprocess.run(
        ["archivebox", "add", "--depth=0", "http://127.0.0.1:8080/static/example.com.html"],
        capture_output=True,
        env=disable_extractors_dict,
    )

    output_json = json.loads((tmp_path / "index.json").read_text())
    assert output_json["links"][0]["history"] != {}
|