Added guard clauses and some docs to the download script. Added it to the scripts folder. Added a download script readme. Added a section to the root readme.

christos 2020-03-25 22:02:56 +00:00
parent eb3429ccaf
commit e4c4f2e053
3 changed files with 49 additions and 0 deletions


@@ -119,6 +119,14 @@ Reading a paper is not the same as reading a blogpost or a novel. Here are a few
* Love a Paper - [@loveapaper](https://twitter.com/loveapaper)
### Download all papers
Open your favourite terminal and run:
```bash
$ ./scripts/download_all.sh
```
## Contributing Guidelines
Please take a look at our [CONTRIBUTING.md](https://github.com/papers-we-love/papers-we-love/blob/master/.github/CONTRIBUTING.md) file.

scripts/README.md Normal file

@@ -0,0 +1,12 @@
# Download Utility
A nice little script that downloads all of the papers (PDFs) in the repo into their respective folders.
### Usage
Simply open a terminal and run:
```bash
$ ./download_all.sh
```
This can be run from anywhere, as long as the script doesn't change location, because it resolves all paths relative to its own directory.
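For example, assuming a hypothetical clone location of `~/projects/papers-we-love`, invoking the script by its path works from any working directory:
```bash
$ cd ~
$ ~/projects/papers-we-love/scripts/download_all.sh
```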

scripts/download_all.sh Executable file

@@ -0,0 +1,29 @@
#!/bin/bash

# Guard clauses: check that the required binaries are installed.
which wget > /dev/null || { echo "Error: wget not installed." ; exit 1 ; }
which egrep > /dev/null || { echo "Error: egrep not installed." ; exit 1 ; }
which xargs > /dev/null || { echo "Error: xargs not installed." ; exit 1 ; }

# Recursively traverse the directories in the repo, scraping each markdown
# file for URLs containing "pdf", and download the PDFs into their
# respective directories.
download_for_directory() {
    cd "$1"

    # Recurse into each subdirectory in a background job so that
    # sibling directories download in parallel.
    for f in *; do
        if [ -d "$f" ]; then
            download_for_directory "$f" &
        fi
    done

    # Pull PDF URLs out of any markdown files in this directory and fetch
    # them here; tr strips the trailing ')' left over from markdown links.
    cat *.md 2> /dev/null \
        | egrep -o 'https?://[^ ]+' \
        | grep 'pdf' | tr -d ')' \
        | xargs --no-run-if-empty wget --no-clobber --quiet --timeout=5 --tries=2

    # Wait for the subdirectory jobs spawned above before reporting done.
    wait
    cd ..
    echo "$1 done."
}

BASEDIR="$(dirname "$0")/.."
download_for_directory "$BASEDIR"
wait
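As a quick illustration of what the URL-extraction pipeline does, here is a minimal sketch run against a made-up markdown entry (the paper title and URL are hypothetical):
```bash
# Hypothetical entry, as it might appear in a category README:
echo "* [Some Paper (1999)](https://example.com/papers/some-paper.pdf)" \
    | egrep -o 'https?://[^ ]+' \
    | grep 'pdf' \
    | tr -d ')'
# Prints: https://example.com/papers/some-paper.pdf
```
xargs then hands each cleaned URL to wget, which skips files that already exist thanks to --no-clobber.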