commit all archived files

This commit is contained in:
mappu 2015-01-23 06:22:07 +00:00
parent 98db4785c5
commit c3ba3cbe28
2 changed files with 53 additions and 0 deletions

README.md (new file):

# gdget
![](https://img.shields.io/badge/written%20in-bash-blue)
A gallery downloader for Google Drive shared folders.
Tags: scraper
At the time of writing, there doesn't seem to be any 'Download all' link for a shared Google Drive folder full of photos. However, the HTML is simple to scrape.
- Accepts URLs in the format `drive.google.com/folderview?id=0B_XXXYYY`
- Tested on Cygwin
- Requires Curl
- Works as of 2015-01-23, but I confidently expect breaking changes to the HTML in the future
## Download
- [⬇️ gdget.sh](dist-archive/gdget.sh) *(813B)*
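
Usage is a single command; a minimal sketch (the folder ID below is a placeholder):

```bash
# Fetch every photo in the shared folder into the current directory
./gdget.sh 'https://drive.google.com/folderview?id=0B_XXXYYY'
```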

dist-archive/gdget.sh (new file):

#!/bin/bash
# gdget.sh - download all photos from a shared Google Drive folder

# Scrape the folder page: emit one "FILE_ID<TAB>filename" line per image.
# (ERE has no lazy quantifiers, so the stray '?' after '+' is dropped;
# the [^"] class already stops the match at the closing quote.)
url_to_imagelist() {
    curl -s "$1" | sed -En 's/.*,,"([^"]+)".+,,,"(0B_[^"]+)".*/\2\t\1/p'
}
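# Illustrative output, tab-separated (file IDs hypothetical):
#   0B_abc123def    IMG_0001.jpg
#   0B_ghi456jkl    IMG_0002.jpg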
# Resolve a file ID to its direct download URL: scrape the interstitial
# "export=download" page, keep the HREF="..." attribute, then strip the
# leading 'HREF="' (6 bytes) and the trailing quote.
get_image_url() {
    curl -s 'https://drive.google.com/uc?id='"$1"'&export=download' \
        | grep -Eo 'HREF=".+"' | cut -b7- | rev | cut -b2- | rev
}
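# Illustrative: the interstitial page is expected to contain a link like
#   <A HREF="https://doc-XX-XX-docs.googleusercontent.com/...">
# (the exact URL shape is an assumption; only the HREF attribute matters)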
# Fetch a single image; the argument is one "FILE_ID<TAB>filename" line.
download_image() {
    local IMG_ID=$(echo "$1" | cut -d$'\t' -f1)
    local DESTNM=$(echo "$1" | cut -d$'\t' -f2)
    curl "$(get_image_url "$IMG_ID")" > "$DESTNM"
}
download_imagelist() {
    # Can't pipe xargs into a bash function, and the usual workaround
    # (export -f) is blocked by shellshock patches - so loop instead.
    while read -r line ; do
        download_image "$line"
    done
}
if [[ "$1" == *drive.google.com/folderview\?id=* ]] ; then
    url_to_imagelist "$1" | download_imagelist
    exit 0
else
    echo "Usage: gdget.sh https://drive.google.com/folderview?id=0B_XXXXX" >&2
    exit 1
fi