#!/bin/bash
#
# duckduckget — search DuckDuckGo for files of a given type hosted on a
# site, then download every result with wget.
#
# Usage: duckduckget [site] [filetype] [outdir]
#   [site]     domain to search for files
#   [filetype] extension without preceding dot
#   [outdir]   output directory relative to the working directory

# Validate the argument count before touching the positionals.
if [[ $# -ne 3 ]]; then
  printf '\n\x1b[31mWrong number of arguments\x1b[0m\n\n' >&2
  printf 'Usage: duckduckget [site] [filetype] [outdir]\nwhere [site] is the domain to search for files\n[filetype] is extension without preceding dot\n[outputdir] is the output directory relative to working directory\n' >&2
  exit 1
fi

site=$1
filetype=$2
outdir=$3

# Normalize outdir so it always ends with a trailing slash.
if ! [[ $outdir =~ /$ ]]; then
  outdir=$outdir/
fi

# Scrape the DuckDuckGo HTML results page: keep only lines ending in the
# requested extension, strip tabs/CRs, drop numeric-prefixed noise lines,
# and prepend a scheme so wget accepts each entry as a URL.
urls=$(curl --silent "https://duckduckgo.com/html/?q=site:${site}%20filetype:${filetype}" \
  | grep "${filetype}\$" \
  | tr -d '\t\r' \
  | grep -v '^[0-9]' \
  | awk '{print "http://" $0}')

# Download each URL into $outdir. --cut-dirs=100 flattens any remote
# directory structure; -P sets the download prefix directory.
while IFS= read -r url; do
  [[ -n $url ]] || continue
  wget --cut-dirs=100 -P "$outdir" "$url"
done <<< "$urls"