
refactor: Change add() to receive url and depth instead of import_str and import_path

Cristian 2020-07-08 08:17:47 -05:00
parent c1d8a74e4f
commit f12bfeb322
3 changed files with 15 additions and 30 deletions
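
At the call-site level the change is straightforward: instead of choosing between the mutually exclusive import_str / import_path keywords, callers now always pass url and select crawl behaviour with depth. A minimal before/after sketch of a call site (values are illustrative; add and OUTPUT_DIR are the project's own names, assumed to be in scope here; judging from the new add() body, depth=1 additionally imports the links found on the page):

    # before this commit: exactly one of import_str / import_path could be given
    add(
        import_str='https://example.com',
        import_path=None,
        update_all=False,
        index_only=False,
        out_dir=OUTPUT_DIR,
    )

    # after this commit: always pass the URL; depth=0 archives only the URL,
    # depth=1 also imports the links found on that page
    add(
        url='https://example.com',
        depth=1,
        update_all=False,
        index_only=False,
        out_dir=OUTPUT_DIR,
    )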

View file

@@ -68,20 +68,12 @@ def main(args: Optional[List[str]]=None, stdin: Optional[IO]=None, pwd: Optional
     import_path = command.import_path
     add(
-        import_str=import_path,
-        import_path=None,
+        url=import_path,
+        depth=command.depth,
         update_all=command.update_all,
         index_only=command.index_only,
         out_dir=pwd or OUTPUT_DIR,
     )
-    if command.depth == 1:
-        add(
-            import_str=None,
-            import_path=import_path,
-            update_all=command.update_all,
-            index_only=command.index_only,
-            out_dir=pwd or OUTPUT_DIR,
-        )
 
 
 if __name__ == '__main__':
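
The CLI hunk above relies on the parsed command already carrying a depth value. A hypothetical sketch of how such an option could be declared with argparse (the flag name, choices, and default are assumptions for illustration, not part of this diff):

    import argparse

    parser = argparse.ArgumentParser(prog='archivebox add')
    parser.add_argument('import_path', nargs='?', default=None,
                        help='URL or path to add to the archive')
    parser.add_argument('--depth', type=int, choices=(0, 1), default=0,
                        help='0: archive only the URL itself, 1: also import links found on the page')

    command = parser.parse_args(['https://example.com', '--depth', '1'])
    assert command.import_path == 'https://example.com' and command.depth == 1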

View file

@@ -66,12 +66,10 @@ class AddLinks(View):
         if form.is_valid():
             url = form.cleaned_data["url"]
             print(f'[+] Adding URL: {url}')
-            if form.cleaned_data["source"] == "url":
-                key = "import_str"
-            else:
-                key = "import_path"
+            depth = 0 if form.cleaned_data["source"] == "url" else 1
             input_kwargs = {
-                key: url,
+                "url": url,
+                "depth": depth,
                 "update_all": False,
                 "out_dir": OUTPUT_DIR,
             }
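
In the web view, the four-line key selection collapses into a single conditional expression that maps the form's source choice to a depth. The same mapping in isolation, as a small helper (the helper name and the placeholder OUTPUT_DIR are illustrative; the field names mirror the diff):

    OUTPUT_DIR = './output'  # placeholder; the real value comes from project config

    def build_add_kwargs(url: str, source: str) -> dict:
        # a plain 'url' submission archives just that page (depth 0);
        # any other source choice is treated as a depth-1 import
        depth = 0 if source == 'url' else 1
        return {
            'url': url,
            'depth': depth,
            'update_all': False,
            'out_dir': OUTPUT_DIR,
        }

    input_kwargs = build_add_kwargs('https://example.com', source='url')
    # the view would then call add(**input_kwargs)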

View file

@@ -496,8 +496,8 @@ def status(out_dir: str=OUTPUT_DIR) -> None:
 
 
 @enforce_types
-def add(import_str: Optional[str]=None,
-        import_path: Optional[str]=None,
+def add(url: str,
+        depth: int=0,
         update_all: bool=not ONLY_NEW,
         index_only: bool=False,
         out_dir: str=OUTPUT_DIR) -> List[Link]:
@@ -505,17 +505,9 @@ def add(import_str: Optional[str]=None,
     check_data_folder(out_dir=out_dir)
 
-    if (import_str and import_path) or (not import_str and not import_path):
-        stderr(
-            '[X] You should pass an import path or a page url as an argument\n',
-            color='red',
-        )
-        raise SystemExit(2)
-    elif import_str:
-        import_path = save_stdin_to_sources(import_str, out_dir=out_dir)
-    elif import_path:
-        import_path = save_file_to_sources(import_path, out_dir=out_dir)
+    base_path = save_stdin_to_sources(url, out_dir=out_dir)
+    if depth == 1:
+        depth_path = save_file_to_sources(url, out_dir=out_dir)
 
     check_dependencies()
 
     # Step 1: Load list of links from the existing index
@@ -523,8 +515,11 @@ def add(import_str: Optional[str]=None,
     all_links: List[Link] = []
     new_links: List[Link] = []
     all_links = load_main_index(out_dir=out_dir)
-    if import_path:
-        all_links, new_links = import_new_links(all_links, import_path, out_dir=out_dir)
+    all_links, new_links = import_new_links(all_links, base_path, out_dir=out_dir)
+    if depth == 1:
+        all_links, new_links_depth = import_new_links(all_links, depth_path, out_dir=out_dir)
+        new_links = new_links + new_links_depth
 
     # Step 2: Write updated index with deduped old and new links back to disk
     write_main_index(links=all_links, out_dir=out_dir)
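
Read together, the three hunks above reduce add()'s import logic to a single code path keyed on depth. A simplified sketch of the resulting flow (default values simplified; the save_*/import_*/index helpers are the project's own functions, referenced here only by name, and the trailing steps of add() are omitted):

    def add(url: str, depth: int = 0, update_all: bool = False,
            index_only: bool = False, out_dir: str = OUTPUT_DIR) -> List[Link]:
        check_data_folder(out_dir=out_dir)

        # depth 0: record the URL itself as a source to import
        base_path = save_stdin_to_sources(url, out_dir=out_dir)
        if depth == 1:
            # depth 1: also download the page so the links it contains can be imported
            depth_path = save_file_to_sources(url, out_dir=out_dir)

        check_dependencies()

        # merge newly discovered links into the existing index
        all_links = load_main_index(out_dir=out_dir)
        all_links, new_links = import_new_links(all_links, base_path, out_dir=out_dir)
        if depth == 1:
            all_links, new_links_depth = import_new_links(all_links, depth_path, out_dir=out_dir)
            new_links = new_links + new_links_depth

        write_main_index(links=all_links, out_dir=out_dir)
        # ... indexing/archiving of new_links continues as before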