From 34bed7a2c63e1625b5c15f0b0d832b4d12f75993 Mon Sep 17 00:00:00 2001
From: ArafatX
Date: Tue, 25 Oct 2022 19:18:13 +0800
Subject: [PATCH] Add test and version commands; return 1 on error paths

Added a "test" command that verifies the Dropbox account by running a test
upload, share, and delete. Example output:

Test file uploading ...
 > Uploading "/tmp/dropbox_uploader_test_file" to "/dropbox_uploader/dropbox_uploader_test_file"... DONE
 [ OK ]
Test file sharing ...
 > Share link: https://www.dropbox.com/s/e6xul4bjyk8xdlx/dropbox_uploader_test_file?dl=0
 [ OK ]
Test file deleting ...
 > Deleting "/dropbox_uploader/dropbox_uploader_test_file"... DONE
 [ OK ]
Removing local test file ...

The "test" command also runs automatically after a new config file has been
created.

Added a "version" command that prints the script version. Example output:

Dropbox Uploader v1.0

Added missing "return 1" statements to several error paths.
---
 dropbox_uploader.sh | 2715 ++++++++++++++++++++++---------------------
 1 file changed, 1389 insertions(+), 1326 deletions(-)

diff --git a/dropbox_uploader.sh b/dropbox_uploader.sh
index b9926c2..3901e18 100755
--- a/dropbox_uploader.sh
+++ b/dropbox_uploader.sh
@@ -67,7 +67,8 @@ APP_CREATE_URL="https://www.dropbox.com/developers/apps"
 RESPONSE_FILE="$TMP_DIR/du_resp_$RANDOM"
 CHUNK_FILE="$TMP_DIR/du_chunk_$RANDOM"
 TEMP_FILE="$TMP_DIR/du_tmp_$RANDOM"
-AUTH_ACCESS_TOKEN_EXPIRE="0"
+OAUTH_ACCESS_TOKEN_EXPIRE="0"
+OAUTH_ACCESS_TOKEN=""
 BIN_DEPS="sed basename date grep stat dd mkdir"
 VERSION="1.0"

@@ -75,1487 +76,1532 @@ umask 077
 #Check the shell
 if [ -z "$BASH_VERSION" ]; then
- echo -e "Error: this script requires the BASH shell!"
- exit 1
+ echo -e "Error: this script requires the BASH shell!"
+ exit 1
 fi
 shopt -s nullglob #Bash allows filename patterns which match no files to expand to a null string, rather than themselves
 shopt -s dotglob #Bash includes filenames beginning with a "." in the results of filename expansion
 #Check temp folder
-if [[ ! -d "$TMP_DIR" ]]; then
- echo -e "Error: the temporary folder $TMP_DIR doesn't exists!"
- echo -e "Please edit this script and set the TMP_DIR variable to a valid temporary folder to use."
- exit 1
+if [[ ! -d "${TMP_DIR}" ]]; then
+ echo -e "Error: the temporary folder ${TMP_DIR} doesn't exists!"
+ echo -e "Please edit this script and set the TMP_DIR variable to a valid temporary folder to use."
+ exit 1
 fi
 #Look for optional config file parameter
 while getopts ":qpskdhf:x:" opt; do
- case $opt in
+ case $opt in
- f)
- CONFIG_FILE=$OPTARG
+ f)
+ CONFIG_FILE=$OPTARG
 ;;
- d)
- DEBUG=1
+ d)
+ DEBUG=1
 ;;
- q)
- QUIET=1
+ q)
+ QUIET=1
 ;;
- p)
- SHOW_PROGRESSBAR=1
+ p)
+ SHOW_PROGRESSBAR=1
 ;;
- k)
- CURL_ACCEPT_CERTIFICATES="-k"
+ k)
+ CURL_ACCEPT_CERTIFICATES="-k"
 ;;
- s)
- SKIP_EXISTING_FILES=1
+ s)
+ SKIP_EXISTING_FILES=1
 ;;
- h)
- HUMAN_READABLE_SIZE=1
+ h)
+ HUMAN_READABLE_SIZE=1
 ;;
- x)
- EXCLUDE+=( $OPTARG )
+ x)
+ EXCLUDE+=("$OPTARG")
 ;;
- \?)
- echo "Invalid option: -$OPTARG" >&2
- exit 1
+ \?)
+ echo "Invalid option: -$OPTARG" >&2
+ exit 1
 ;;
- :)
- echo "Option -$OPTARG requires an argument."
>&2 + exit 1 ;; esac done -if [[ $DEBUG != 0 ]]; then - echo $VERSION - uname -a 2> /dev/null - cat /etc/issue 2> /dev/null - set -x - RESPONSE_FILE="$TMP_DIR/du_resp_debug" +if [[ $DEBUG -ne 0 ]]; then + echo $VERSION + uname -a 2>/dev/null + cat /etc/issue 2>/dev/null + set -x + RESPONSE_FILE="$TMP_DIR/du_resp_debug" fi if [[ $CURL_BIN == "" ]]; then - BIN_DEPS="$BIN_DEPS curl" - CURL_BIN="curl" + BIN_DEPS="$BIN_DEPS curl" + CURL_BIN="curl" fi #Dependencies check -which $BIN_DEPS > /dev/null -if [[ $? != 0 ]]; then - for i in $BIN_DEPS; do - which $i > /dev/null || - NOT_FOUND="$i $NOT_FOUND" - done - echo -e "Error: Required program could not be found: $NOT_FOUND" - exit 1 +which $BIN_DEPS >/dev/null +if [[ $? -ne 0 ]]; then + for BIN_DEP in $BIN_DEPS; do + which "${BIN_DEP}" >/dev/null || + NOT_FOUND="${BIN_DEP} $NOT_FOUND" + done + echo -e "Error: Required program could not be found: $NOT_FOUND" + exit 1 fi #Check if readlink is installed and supports the -m option #It's not necessary, so no problem if it's not installed -which readlink > /dev/null -if [[ $? == 0 && $(readlink -m "//test" 2> /dev/null) == "/test" ]]; then - HAVE_READLINK=1 +which readlink >/dev/null +if [[ $? -eq 0 && $(readlink -m "//test" 2>/dev/null) == "/test" ]]; then + HAVE_READLINK=1 else - HAVE_READLINK=0 + HAVE_READLINK=0 fi #Forcing to use the builtin printf, if it's present, because it's better #otherwise the external printf program will be used #Note that the external printf command can cause character encoding issues! -builtin printf "" 2> /dev/null -if [[ $? == 0 ]]; then - PRINTF="builtin printf" - PRINTF_OPT="-v o" +builtin printf "" 2>/dev/null +if [[ $? -eq 0 ]]; then + PRINTF="builtin printf" + PRINTF_OPT="-v o" else - PRINTF=$(which printf) - if [[ $? != 0 ]]; then - echo -e "Error: Required program could not be found: printf" - fi - PRINTF_OPT="" + PRINTF=$(which printf) + if [[ $? -ne 0 ]]; then + echo -e "Error: Required program could not be found: printf" + fi + PRINTF_OPT="" fi #Print the message based on $QUIET variable -function print -{ - if [[ $QUIET == 0 ]]; then - echo -ne "$1"; - fi +print() { + if [[ $QUIET -eq 0 ]]; then + echo -ne "$1" + fi } #Returns unix timestamp -function utime -{ - date '+%s' +utime() { + date '+%s' } #Remove temporary files -function remove_temp_files -{ - if [[ $DEBUG == 0 ]]; then - rm -fr "$RESPONSE_FILE" - rm -fr "$CHUNK_FILE" - rm -fr "$TEMP_FILE" - fi +remove_temp_files() { + if [[ $DEBUG == 0 ]]; then + rm -fr "$RESPONSE_FILE" + rm -fr "$CHUNK_FILE" + rm -fr "$TEMP_FILE" + fi } #Converts bytes to human readable format -function convert_bytes -{ - if [[ $HUMAN_READABLE_SIZE == 1 && "$1" != "" ]]; then - if (($1 > 1073741824));then - echo $(($1/1073741824)).$(($1%1073741824/100000000))"G"; - elif (($1 > 1048576));then - echo $(($1/1048576)).$(($1%1048576/100000))"M"; - elif (($1 > 1024));then - echo $(($1/1024)).$(($1%1024/100))"K"; - else - echo $1; - fi +convert_bytes() { + if [[ $HUMAN_READABLE_SIZE == 1 && "$1" != "" ]]; then + if (($1 > 1073741824)); then + echo $(($1 / 1073741824)).$(($1 % 1073741824 / 100000000))"G" + elif (($1 > 1048576)); then + echo $(($1 / 1048576)).$(($1 % 1048576 / 100000))"M" + elif (($1 > 1024)); then + echo $(($1 / 1024)).$(($1 % 1024 / 100))"K" else - echo $1; + echo "$1" fi + else + echo "$1" + fi } #Returns the file size in bytes -function file_size -{ - #Generic GNU - SIZE=$(stat --format="%s" "$1" 2> /dev/null) - if [ $? 
-eq 0 ]; then - echo $SIZE - return - fi - - #Some embedded linux devices - SIZE=$(stat -c "%s" "$1" 2> /dev/null) - if [ $? -eq 0 ]; then - echo $SIZE - return - fi - - #BSD, OSX and other OSs - SIZE=$(stat -f "%z" "$1" 2> /dev/null) - if [ $? -eq 0 ]; then - echo $SIZE - return - fi - - echo "0" +file_size() { + local size + #Generic GNU + size=$(stat --format="%s" "$1" 2>/dev/null) + if [ $? -eq 0 ]; then + echo "$size" + return + fi + + #Some embedded linux devices + size=$(stat -c "%s" "$1" 2>/dev/null) + if [ $? -eq 0 ]; then + echo "$size" + return + fi + + #BSD, OSX and other OSs + size=$(stat -f "%z" "$1" 2>/dev/null) + if [ $? -eq 0 ]; then + echo "$size" + return + fi + + echo "0" } - #Usage -function usage -{ - echo -e "Dropbox Uploader v$VERSION" - echo -e "Andrea Fabrizi - andrea.fabrizi@gmail.com\n" - echo -e "Usage: $0 [PARAMETERS] COMMAND..." - echo -e "\nCommands:" - - echo -e "\t upload " - echo -e "\t download [LOCAL_FILE/DIR]" - echo -e "\t delete " - echo -e "\t move " - echo -e "\t copy " - echo -e "\t mkdir " - echo -e "\t list [REMOTE_DIR]" - echo -e "\t monitor [REMOTE_DIR] [TIMEOUT]" - echo -e "\t share " - echo -e "\t saveurl " - echo -e "\t search " - echo -e "\t info" - echo -e "\t space" - echo -e "\t unlink" - - echo -e "\nOptional parameters:" - echo -e "\t-f Load the configuration file from a specific file" - echo -e "\t-s Skip already existing files when download/upload. Default: Overwrite" - echo -e "\t-d Enable DEBUG mode" - echo -e "\t-q Quiet mode. Don't show messages" - echo -e "\t-h Show file sizes in human readable format" - echo -e "\t-p Show cURL progress meter" - echo -e "\t-k Doesn't check for SSL certificates (insecure)" - echo -e "\t-x Ignores/excludes directories or files from syncing. -x filename -x directoryname. example: -x .git" - - echo -en "\nFor more info and examples, please see the README file.\n\n" - remove_temp_files - exit 1 +usage() { + echo -e "Dropbox Uploader v$VERSION" + echo -e "Andrea Fabrizi - andrea.fabrizi@gmail.com\n" + echo -e "Usage: $0 [PARAMETERS] COMMAND..." + echo -e "\nCommands:" + + echo -e "\t upload " + echo -e "\t download [LOCAL_FILE/DIR]" + echo -e "\t delete " + echo -e "\t move " + echo -e "\t copy " + echo -e "\t mkdir " + echo -e "\t list [REMOTE_DIR]" + echo -e "\t monitor [REMOTE_DIR] [TIMEOUT]" + echo -e "\t share " + echo -e "\t saveurl " + echo -e "\t search " + echo -e "\t info" + echo -e "\t space" + echo -e "\t unlink" + echo -e "\t test" + echo -e "\t version" + + echo -e "\nOptional parameters:" + echo -e "\t-f Load the configuration file from a specific file" + echo -e "\t-s Skip already existing files when download/upload. Default: Overwrite" + echo -e "\t-d Enable DEBUG mode" + echo -e "\t-q Quiet mode. Don't show messages" + echo -e "\t-h Show file sizes in human readable format" + echo -e "\t-p Show cURL progress meter" + echo -e "\t-k Doesn't check for SSL certificates (insecure)" + echo -e "\t-x Ignores/excludes directories or files from syncing. -x filename -x directoryname. example: -x .git" + + echo -en "\nFor more info and examples, please see the README file.\n\n" + remove_temp_files + exit 1 } #Check the curl exit code -function check_http_response -{ - CODE=$? - - #Checking curl exit code - case $CODE in +check_http_response() { + local code + code=$1 + #Checking curl exit code - #OK - 0) + case $code in + #OK - ;; + 0) ;; - #Proxy error - 5) - print "\nError: Couldn't resolve proxy. 
The given proxy host could not be resolved.\n" + #Proxy error + 5) + print "\nError: Couldn't resolve proxy. The given proxy host could not be resolved.\n" - remove_temp_files - exit 1 - ;; + remove_temp_files + exit 1 + ;; - #Missing CA certificates - 60|58|77) - print "\nError: cURL is not able to performs peer SSL certificate verification.\n" - print "Please, install the default ca-certificates bundle.\n" - print "To do this in a Debian/Ubuntu based system, try:\n" - print " sudo apt-get install ca-certificates\n\n" - print "If the problem persists, try to use the -k option (insecure).\n" + #Missing CA certificates + 60 | 58 | 77) + print "\nError: cURL is not able to performs peer SSL certificate verification.\n" + print "Please, install the default ca-certificates bundle.\n" + print "To do this in a Debian/Ubuntu based system, try:\n" + print " sudo apt-get install ca-certificates\n\n" + print "If the problem persists, try to use the -k option (insecure).\n" - remove_temp_files - exit 1 - ;; + remove_temp_files + exit 1 + ;; - 6) - print "\nError: Couldn't resolve host.\n" + 6) + print "\nError: Couldn't resolve host.\n" - remove_temp_files - exit 1 - ;; + remove_temp_files + exit 1 + ;; - 7) - print "\nError: Couldn't connect to host.\n" + 7) + print "\nError: Couldn't connect to host.\n" - remove_temp_files - exit 1 - ;; + remove_temp_files + exit 1 + ;; - esac + esac - #Checking response file for generic errors - if grep -q "^HTTP/[12].* 400" "$RESPONSE_FILE"; then - ERROR_MSG=$(sed -n -e 's/{"error": "\([^"]*\)"}/\1/p' "$RESPONSE_FILE") + #Checking response file for generic errors + if grep -q "^HTTP/[12].* 400" "$RESPONSE_FILE"; then + local error_msg + error_msg=$(sed -n -e 's/{"error": "\([^"]*\)"}/\1/p' "$RESPONSE_FILE") - case $ERROR_MSG in - *access?attempt?failed?because?this?app?is?not?configured?to?have*) - echo -e "\nError: The Permission type/Access level configured doesn't match the DropBox App settings!\nPlease run \"$0 unlink\" and try again." - exit 1 - ;; - esac + case $error_msg in + *access?attempt?failed?because?this?app?is?not?configured?to?have*) + echo -e "\nError: The Permission type/Access level configured doesn't match the DropBox App settings!\nPlease run \"$0 unlink\" and try again." + exit 1 + ;; + esac - fi + fi } # Checks if the access token has expired. If so a new one is created. -function ensure_accesstoken -{ - local now=`date +%s` +ensure_accesstoken() { + local now expires_in - if [[ $OAUTH_ACCESS_TOKEN_EXPIRE > $now ]]; then - return - fi + now=$(date +%s) - $CURL_BIN $CURL_ACCEPT_CERTIFICATES $API_OAUTH_TOKEN -d grant_type=refresh_token -d refresh_token=$OAUTH_REFRESH_TOKEN -u $OAUTH_APP_KEY:$OAUTH_APP_SECRET -o "$RESPONSE_FILE" 2>/dev/null - check_http_response - OAUTH_ACCESS_TOKEN=$(sed -n 's/.*"access_token": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + # This does not do anything + if [[ $OAUTH_ACCESS_TOKEN_EXPIRE > $now ]]; then + return + fi - local expires_in=$(sed -n 's/.*"expires_in": \([0-9]*\).*/\1/p' "$RESPONSE_FILE") + $CURL_BIN $CURL_ACCEPT_CERTIFICATES $API_OAUTH_TOKEN -d grant_type=refresh_token -d refresh_token=$OAUTH_REFRESH_TOKEN -u $OAUTH_APP_KEY:$OAUTH_APP_SECRET -o "$RESPONSE_FILE" 2>/dev/null + check_http_response $? 
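# A minimal sketch of the calling convention introduced by this refactor:
# check_http_response now takes curl's exit status as its first argument
# instead of reading $? inside the function, so requests are followed by an
# explicit "check_http_response $?" (the URL, headers, and variables below
# are ones already defined in this script):
$CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_INFO_URL" 2>/dev/null
check_http_response $?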
- # one minute safety buffer - OAUTH_ACCESS_TOKEN_EXPIRE=$(($now + $expires_in - 60)) -} + OAUTH_ACCESS_TOKEN=$(sed -n 's/.*"access_token": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") -#Urlencode -function urlencode -{ - #The printf is necessary to correctly decode unicode sequences - local string=$($PRINTF "%s" "${1}") - local strlen=${#string} - local encoded="" - - for (( pos=0 ; pos /dev/null - check_http_response + #Checking if it's a file or a directory + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$file\"}" "$API_METADATA_URL" 2>/dev/null + check_http_response $? - local TYPE=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + type=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - case $TYPE in + case $type in - file) - echo "FILE" - ;; + file) + echo "FILE" + ;; - folder) - echo "DIR" - ;; + folder) + echo "DIR" + ;; - deleted) - echo "ERR" - ;; + deleted) + echo "ERR" + ;; - *) - echo "ERR" - ;; + *) + echo "ERR" + ;; - esac + esac } #Generic upload wrapper around db_upload_file and db_upload_dir functions #$1 = Local source file/dir #$2 = Remote destination file/dir -function db_upload -{ - local SRC=$(normalize_path "$1") - local DST=$(normalize_path "$2") - - for j in "${EXCLUDE[@]}" - do : - if [[ $(echo "$SRC" | grep "$j" | wc -l) -gt 0 ]]; then - print "Skipping excluded file/dir: "$j - return - fi - done - - #Checking if the file/dir exists - if [[ ! -e $SRC && ! -d $SRC ]]; then - print " > No such file or directory: $SRC\n" - ERROR_STATUS=1 - return - fi - - #Checking if the file/dir has read permissions - if [[ ! -r $SRC ]]; then - print " > Error reading file $SRC: permission denied\n" - ERROR_STATUS=1 - return - fi - - TYPE=$(db_stat "$DST") - - #If DST it's a file, do nothing, it's the default behaviour - if [[ $TYPE == "FILE" ]]; then - DST="$DST" - - #if DST doesn't exists and doesn't ends with a /, it will be the destination file name - elif [[ $TYPE == "ERR" && "${DST: -1}" != "/" ]]; then - DST="$DST" - - #if DST doesn't exists and ends with a /, it will be the destination folder - elif [[ $TYPE == "ERR" && "${DST: -1}" == "/" ]]; then - local filename=$(basename "$SRC") - DST="$DST/$filename" - - #If DST it's a directory, it will be the destination folder - elif [[ $TYPE == "DIR" ]]; then - local filename=$(basename "$SRC") - DST="$DST/$filename" - fi - - #It's a directory - if [[ -d $SRC ]]; then - db_upload_dir "$SRC" "$DST" - - #It's a file - elif [[ -e $SRC ]]; then - db_upload_file "$SRC" "$DST" - - #Unsupported object... - else - print " > Skipping not regular file \"$SRC\"\n" +db_upload() { + local src dst type + src=$(normalize_path "$1") + dst=$(normalize_path "$2") + + for j in "${EXCLUDE[@]}"; do + echo "$src" | grep -c "$j" + if [[ $(echo "$src" | grep -c "$j") -gt 0 ]]; then + print "Skipping excluded file/dir: $j" + return fi + done + + #Checking if the file/dir exists + if [[ ! -e $src && ! -d $src ]]; then + print " > No such file or directory: $src\n" + ERROR_STATUS=1 + return 1 + fi + + #Checking if the file/dir has read permissions + if [[ ! 
-r $src ]]; then + print " > Error reading file $src: permission denied\n" + ERROR_STATUS=1 + return 1 + fi + + type=$(db_stat "$dst") + + #If dst it's a file, do nothing, it's the default behaviour + if [[ $type == "FILE" ]]; then + dst="$dst" + + #if dst doesn't exists and doesn't ends with a /, it will be the destination file name + elif [[ $type == "ERR" && "${dst: -1}" != "/" ]]; then + dst="$dst" + + #if dst doesn't exists and ends with a /, it will be the destination folder + elif [[ $type == "ERR" && "${dst: -1}" == "/" ]]; then + local filename + filename=$(basename "$src") + dst="$dst/$filename" + + #If dst it's a directory, it will be the destination folder + elif [[ $type == "DIR" ]]; then + local filename + filename=$(basename "$src") + dst="$dst/$filename" + fi + + #It's a directory + if [[ -d $src ]]; then + db_upload_dir "$src" "$dst" + + #It's a file + elif [[ -e $src ]]; then + db_upload_file "$src" "$dst" + + #Unsupported object... + else + print " > Skipping not regular file \"$src\"\n" + fi } #Generic upload wrapper around db_chunked_upload_file and db_simple_upload_file #The final upload function will be choosen based on the file size #$1 = Local source file #$2 = Remote destination file -function db_upload_file -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - shopt -s nocasematch - - #Checking not allowed file names - basefile_dst=$(basename "$FILE_DST") - if [[ $basefile_dst == "thumbs.db" || \ - $basefile_dst == "desktop.ini" || \ - $basefile_dst == ".ds_store" || \ - $basefile_dst == "icon\r" || \ - $basefile_dst == ".dropbox" || \ - $basefile_dst == ".dropbox.attr" \ - ]]; then - print " > Skipping not allowed file name \"$FILE_DST\"\n" - return - fi - - shopt -u nocasematch - - #Checking file size - FILE_SIZE=$(file_size "$FILE_SRC") - - #Checking if the file already exists - TYPE=$(db_stat "$FILE_DST") - if [[ $TYPE != "ERR" && $SKIP_EXISTING_FILES == 1 ]]; then - print " > Skipping already existing file \"$FILE_DST\"\n" - return - fi - - # Checking if the file has the correct check sum - if [[ $TYPE != "ERR" ]]; then - sha_src=$(db_sha_local "$FILE_SRC") - sha_dst=$(db_sha "$FILE_DST") - if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then - print "> Skipping file \"$FILE_SRC\", file exists with the same hash\n" - return - fi +db_upload_file() { + local file_src file_dst file_size basefile_dst type + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + + shopt -s nocasematch + + #Checking not allowed file names + basefile_dst=$(basename "$file_dst") + if [[ $basefile_dst == "thumbs.db" || $basefile_dst == "desktop.ini" || $basefile_dst == ".ds_store" || $basefile_dst == "icon\r" || $basefile_dst == ".dropbox" || $basefile_dst == ".dropbox.attr" ]] \ + ; then + print " > Skipping not allowed file name \"$file_dst\"\n" + return + fi + + shopt -u nocasematch + + #Checking file size + file_size=$(file_size "$file_src") + + #Checking if the file already exists + type=$(db_stat "$file_dst") + if [[ $type != "ERR" && $SKIP_EXISTING_FILES == 1 ]]; then + print " > Skipping already existing file \"$file_dst\"\n" + return + fi + + # Checking if the file has the correct check sum + if [[ $type != "ERR" ]]; then + local sha_src sha_dst + sha_src=$(db_sha_local "$file_src") + sha_dst=$(db_sha "$file_dst") + if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then + print "> Skipping file \"$file_src\", file exists with the same hash\n" + return fi + fi - if [[ $FILE_SIZE -gt 157286000 ]]; then - #If the file is 
greater than 150Mb, the chunked_upload API will be used - db_chunked_upload_file "$FILE_SRC" "$FILE_DST" - else - db_simple_upload_file "$FILE_SRC" "$FILE_DST" - fi + if [[ $file_size -gt 157286000 ]]; then + #If the file is greater than 150Mb, the chunked_upload API will be used + db_chunked_upload_file "$file_src" "$file_dst" + else + db_simple_upload_file "$file_src" "$file_dst" + fi } #Simple file upload #$1 = Local source file #$2 = Remote destination file -function db_simple_upload_file -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then - CURL_PARAMETERS="--progress-bar" - LINE_CR="\n" - else - CURL_PARAMETERS="-L -s" - LINE_CR="" - fi - - print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\"... $LINE_CR" - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST -i --globoff -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$FILE_SRC" "$API_UPLOAD_URL" - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - print "An error occurred requesting /upload\n" - ERROR_STATUS=1 - fi +db_simple_upload_file() { + local file_src file_dst curl_parameters line_cr + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + + if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then + curl_parameters="--progress-bar" + line_cr="\n" + else + curl_parameters="-L -s" + line_cr="" + fi + + print " > Uploading \"$file_src\" to \"$file_dst\"... $line_cr" + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES $curl_parameters -X POST -i --globoff -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$file_dst\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$file_src" "$API_UPLOAD_URL" + check_http_response $? 
+ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + print "An error occurred requesting /upload\n" + ERROR_STATUS=1 + return 1 + fi } #Chunked file upload #$1 = Local source file #$2 = Remote destination file -function db_chunked_upload_file -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - - if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then - VERBOSE=1 - CURL_PARAMETERS="--progress-bar" - else - VERBOSE=0 - CURL_PARAMETERS="-L -s" - fi - - - - local FILE_SIZE=$(file_size "$FILE_SRC") - local OFFSET=0 - local UPLOAD_ID="" - local UPLOAD_ERROR=0 - local CHUNK_PARAMS="" - - ## Ceil division - let NUMBEROFCHUNK=($FILE_SIZE/1024/1024+$CHUNK_SIZE-1)/$CHUNK_SIZE +db_chunked_upload_file() { + local file_src file_dst curl_parameters file_size offset upload_id upload_error chunk_params \ + session_id chunk_number number_of_chunk + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + + if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then + VERBOSE=1 + curl_parameters="--progress-bar" + else + VERBOSE=0 + curl_parameters="-L -s" + fi + + file_size=$(file_size "$file_src") + offset=0 + upload_id="" + upload_error=0 + chunk_params="" + + ## Ceil division + ((number_of_chunk = (file_size / 1024 / 1024 + CHUNK_SIZE - 1) / CHUNK_SIZE)) + + if [[ $VERBOSE == 1 ]]; then + print " > Uploading \"$file_src\" to \"$file_dst\" by $number_of_chunk chunks ...\n" + else + print " > Uploading \"$file_src\" to \"$file_dst\" by $number_of_chunk chunks " + fi + + #Starting a new upload session + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_START_URL" 2>/dev/null + check_http_response $? + + session_id=$(sed -n 's/{"session_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + + chunk_number=1 + #Uploading chunks... + while [[ $offset != "$file_size" ]]; do + local offset_mb chunk_real_size + ((offset_mb = offset / 1024 / 1024)) + #Create the chunk + dd if="$file_src" of="$CHUNK_FILE" bs=1048576 skip="$offset_mb" count="$CHUNK_SIZE" 2>/dev/null + chunk_real_size=$(file_size "$CHUNK_FILE") if [[ $VERBOSE == 1 ]]; then - print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\" by $NUMBEROFCHUNK chunks ...\n" - else - print " > Uploading \"$FILE_SRC\" to \"$FILE_DST\" by $NUMBEROFCHUNK chunks " + print " >> Uploading chunk $chunk_number of $number_of_chunk\n" fi - #Starting a new upload session + #Uploading the chunk... + echo >"$RESPONSE_FILE" ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_START_URL" 2> /dev/null - check_http_response + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST $curl_parameters --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$session_id\",\"offset\": $offset},\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$CHUNK_FILE" "$API_CHUNKED_UPLOAD_APPEND_URL" + #check_http_response $? 
not needed, because we have to retry the request in case of error - SESSION_ID=$(sed -n 's/{"session_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + ((offset = offset + chunk_real_size)) + upload_error=0 + if [[ $VERBOSE != 1 ]]; then + print "." + fi + ((chunk_number = chunk_number + 1)) + else + if [[ $VERBOSE != 1 ]]; then + print "*" + fi + ((upload_error = upload_error + 1)) + + #On error, the upload is retried for max 3 times + if [[ $upload_error -gt 2 ]]; then + print " FAILED\n" + print "An error occurred requesting /chunked_upload\n" + ERROR_STATUS=1 + return 1 + fi + fi - chunkNumber=1 - #Uploading chunks... - while ([[ $OFFSET != "$FILE_SIZE" ]]); do + done - let OFFSET_MB=$OFFSET/1024/1024 + upload_error=0 - #Create the chunk - dd if="$FILE_SRC" of="$CHUNK_FILE" bs=1048576 skip=$OFFSET_MB count=$CHUNK_SIZE 2> /dev/null - local CHUNK_REAL_SIZE=$(file_size "$CHUNK_FILE") + #Commit the upload + while (true); do - if [[ $VERBOSE == 1 ]]; then - print " >> Uploading chunk $chunkNumber of $NUMBEROFCHUNK\n" - fi + echo >"$RESPONSE_FILE" + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$session_id\",\"offset\": $offset},\"commit\": {\"path\": \"$file_dst\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_FINISH_URL" 2>/dev/null + #check_http_response $? not needed, because we have to retry the request in case of error - #Uploading the chunk... - echo > "$RESPONSE_FILE" - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST $CURL_PARAMETERS --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"close\": false}" --header "Content-Type: application/octet-stream" --data-binary @"$CHUNK_FILE" "$API_CHUNKED_UPLOAD_APPEND_URL" - #check_http_response not needed, because we have to retry the request in case of error - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - let OFFSET=$OFFSET+$CHUNK_REAL_SIZE - UPLOAD_ERROR=0 - if [[ $VERBOSE != 1 ]]; then - print "." 
- fi - ((chunkNumber=chunkNumber+1)) - else - if [[ $VERBOSE != 1 ]]; then - print "*" - fi - let UPLOAD_ERROR=$UPLOAD_ERROR+1 - - #On error, the upload is retried for max 3 times - if [[ $UPLOAD_ERROR -gt 2 ]]; then - print " FAILED\n" - print "An error occurred requesting /chunked_upload\n" - ERROR_STATUS=1 - return - fi - fi + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + upload_error=0 + break + else + print "*" + ((upload_error = upload_error + 1)) - done - - UPLOAD_ERROR=0 - - #Commit the upload - while (true); do - - echo > "$RESPONSE_FILE" - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"cursor\": {\"session_id\": \"$SESSION_ID\",\"offset\": $OFFSET},\"commit\": {\"path\": \"$FILE_DST\",\"mode\": \"overwrite\",\"autorename\": true,\"mute\": false}}" --header "Content-Type: application/octet-stream" --data-binary @/dev/null "$API_CHUNKED_UPLOAD_FINISH_URL" 2> /dev/null - #check_http_response not needed, because we have to retry the request in case of error - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - UPLOAD_ERROR=0 - break - else - print "*" - let UPLOAD_ERROR=$UPLOAD_ERROR+1 - - #On error, the commit is retried for max 3 times - if [[ $UPLOAD_ERROR -gt 2 ]]; then - print " FAILED\n" - print "An error occurred requesting /commit_chunked_upload\n" - ERROR_STATUS=1 - return - fi - fi + #On error, the commit is retried for max 3 times + if [[ $upload_error -gt 2 ]]; then + print " FAILED\n" + print "An error occurred requesting /commit_chunked_upload\n" + ERROR_STATUS=1 + return 1 + fi + fi - done + done - print " DONE\n" + print " DONE\n" } #Directory upload #$1 = Local source dir #$2 = Remote destination dir -function db_upload_dir -{ - local DIR_SRC=$(normalize_path "$1") - local DIR_DST=$(normalize_path "$2") +db_upload_dir() { + local dis_src dir_dst + dis_src=$(normalize_path "$1") + dir_dst=$(normalize_path "$2") - #Creatig remote directory - db_mkdir "$DIR_DST" + #Creatig remote directory + db_mkdir "$dir_dst" - for file in "$DIR_SRC/"*; do - db_upload "$file" "$DIR_DST" - done + for file in "$dis_src/"*; do + db_upload "$file" "$dir_dst" + done } #Generic download wrapper #$1 = Remote source file/dir #$2 = Local destination file/dir -function db_download -{ - local SRC=$(normalize_path "$1") - local DST=$(normalize_path "$2") +db_download() { + local src dst dest_dir basedir out_file type src_req + src=$(normalize_path "$1") + dst=$(normalize_path "$2") - TYPE=$(db_stat "$SRC") + type=$(db_stat "$src") - #It's a directory - if [[ $TYPE == "DIR" ]]; then + #It's a directory + if [[ $type == "DIR" ]]; then - #If the DST folder is not specified, I assume that is the current directory - if [[ $DST == "" ]]; then - DST="." - fi + #If the dst folder is not specified, I assume that is the current directory + if [[ $dst == "" ]]; then + dst="." + fi - #Checking if the destination directory exists - if [[ ! -d $DST ]]; then - local basedir="" - else - local basedir=$(basename "$SRC") - fi + #Checking if the destination directory exists + if [[ ! -d $dst ]]; then + basedir="" + else + basedir=$(basename "$src") + fi - local DEST_DIR=$(normalize_path "$DST/$basedir") - print " > Downloading folder \"$SRC\" to \"$DEST_DIR\"... \n" - - if [[ ! -d "$DEST_DIR" ]]; then - print " > Creating local directory \"$DEST_DIR\"... " - mkdir -p "$DEST_DIR" - - #Check - if [[ $? 
== 0 ]]; then - print "DONE\n" - else - print "FAILED\n" - ERROR_STATUS=1 - return - fi - fi + dest_dir=$(normalize_path "$dst/$basedir") + print " > Downloading folder \"$src\" to \"$dest_dir\"... \n" - if [[ $SRC == "/" ]]; then - SRC_REQ="" - else - SRC_REQ="$SRC" - fi + if [[ ! -d "$dest_dir" ]]; then + print " > Creating local directory \"$dest_dir\"... " + mkdir -p "$dest_dir" + #Check + if [[ $? -eq 0 ]]; then + print "DONE\n" + else + print "FAILED\n" + ERROR_STATUS=1 + return 1 + fi + fi - OUT_FILE=$(db_list_outfile "$SRC_REQ") - if [ $? -ne 0 ]; then - # When db_list_outfile fail, the error message is OUT_FILE - print "$OUT_FILE\n" - ERROR_STATUS=1 - return - fi + if [[ $src == "/" ]]; then + src_req="" + else + src_req="$src" + fi - #For each entry... - while read -r line; do + out_file=$(db_list_outfile "$src_req") + if [ $? -ne 0 ]; then + # When db_list_outfile fail, the error message is out_file + print "$out_file\n" + ERROR_STATUS=1 + return 1 + fi - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} + #For each entry... + while read -r line; do + local file meta type size + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} - #Removing unneeded / - FILE=${FILE##*/} + #Removing unneeded / + file=${file##*/} - if [[ $TYPE == "file" ]]; then - db_download_file "$SRC/$FILE" "$DEST_DIR/$FILE" - elif [[ $TYPE == "folder" ]]; then - db_download "$SRC/$FILE" "$DEST_DIR" - fi + if [[ $type == "file" ]]; then + db_download_file "$src/$file" "$dest_dir/$file" + elif [[ $type == "folder" ]]; then + db_download "$src/$file" "$dest_dir" + fi - done < $OUT_FILE + done <"$out_file" - rm -fr $OUT_FILE + rm -fr "$out_file" - #It's a file - elif [[ $TYPE == "FILE" ]]; then + #It's a file + elif [[ $type == "FILE" ]]; then - #Checking DST - if [[ $DST == "" ]]; then - DST=$(basename "$SRC") - fi + #Checking dst + if [[ $dst == "" ]]; then + dst=$(basename "$src") + fi - #If the destination is a directory, the file will be download into - if [[ -d $DST ]]; then - DST="$DST/$SRC" - fi + #If the destination is a directory, the file will be download into + if [[ -d $dst ]]; then + dst="$dst/$src" + fi - db_download_file "$SRC" "$DST" + db_download_file "$src" "$dst" - #Doesn't exists - else - print " > No such file or directory: $SRC\n" - ERROR_STATUS=1 - return - fi + #Doesn't exists + else + print " > No such file or directory: $src\n" + ERROR_STATUS=1 + return 1 + fi } #Simple file download #$1 = Remote source file #$2 = Local destination file -function db_download_file -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then - CURL_PARAMETERS="-L --progress-bar" - LINE_CR="\n" - else - CURL_PARAMETERS="-L -s" - LINE_CR="" - fi - - #Checking if the file already exists - if [[ -e $FILE_DST && $SKIP_EXISTING_FILES == 1 ]]; then - print " > Skipping already existing file \"$FILE_DST\"\n" - return - fi - - # Checking if the file has the correct check sum - if [[ $TYPE != "ERR" ]]; then - sha_src=$(db_sha "$FILE_SRC") - sha_dst=$(db_sha_local "$FILE_DST") - if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then - print "> Skipping file \"$FILE_SRC\", file exists with the same hash\n" - return - fi - fi - - #Creating the empty file, that for two reasons: - #1) In this way I can check if the destination file is writable or not - #2) Curl doesn't automatically creates files with 0 bytes size - dd if=/dev/zero of="$FILE_DST" count=0 2> /dev/null - if [[ $? 
!= 0 ]]; then - print " > Error writing file $FILE_DST: permission denied\n" - ERROR_STATUS=1 - return - fi - - print " > Downloading \"$FILE_SRC\" to \"$FILE_DST\"... $LINE_CR" - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES $CURL_PARAMETERS -X POST --globoff -D "$RESPONSE_FILE" -o "$FILE_DST" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$FILE_SRC\"}" "$API_DOWNLOAD_URL" - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - rm -fr "$FILE_DST" - ERROR_STATUS=1 - return +db_download_file() { + local file_src file_dst curl_parameters line_cr + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + + if [[ $SHOW_PROGRESSBAR == 1 && $QUIET == 0 ]]; then + curl_parameters="-L --progress-bar" + line_cr="\n" + else + curl_parameters="-L -s" + line_cr="" + fi + + #Checking if the file already exists + if [[ -e $file_dst && $SKIP_EXISTING_FILES == 1 ]]; then + print " > Skipping already existing file \"$file_dst\"\n" + return + fi + + # Checking if the file has the correct check sum + type=$(db_stat "$file_dst") + if [[ $type != "ERR" ]]; then + local sha_src sha_dst + sha_src=$(db_sha "$file_src") + sha_dst=$(db_sha_local "$file_dst") + if [[ $sha_src == $sha_dst && $sha_src != "ERR" ]]; then + print "> Skipping file \"$file_src\", file exists with the same hash\n" + return fi + fi + + #Creating the empty file, that for two reasons: + #1) In this way I can check if the destination file is writable or not + #2) Curl doesn't automatically creates files with 0 bytes size + dd if=/dev/zero of="$file_dst" count=0 2>/dev/null + if [[ $? != 0 ]]; then + print " > Error writing file $file_dst: permission denied\n" + ERROR_STATUS=1 + return 1 + fi + + print " > Downloading \"$file_src\" to \"$file_dst\"... $line_cr" + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES $curl_parameters -X POST --globoff -D "$RESPONSE_FILE" -o "$file_dst" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Dropbox-API-Arg: {\"path\": \"$file_src\"}" "$API_DOWNLOAD_URL" + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + rm -fr "$file_dst" + ERROR_STATUS=1 + return 1 + fi } #Saveurl -#$1 = URL +#$1 = url #$2 = Remote file destination -function db_saveurl -{ - local URL="$1" - local FILE_DST=$(normalize_path "$2") - local FILE_NAME=$(basename "$URL") - - print " > Downloading \"$URL\" to \"$FILE_DST\"..." +db_saveurl() { + local url file_dst file_name job_id + url="$1" + file_dst=$(normalize_path "$2") + file_name=$(basename "$url") + + print " > Downloading \"$url\" to \"$file_dst\"..." + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$file_dst/$file_name\", \"url\": \"$url\"}" "$API_SAVEURL_URL" 2>/dev/null + check_http_response $? 
+ + job_id=$(sed -n 's/.*"async_job_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + if [[ $job_id == "" ]]; then + print " > Error getting the job id\n" + return + fi + + #Checking the status + while (true); do + local status message ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST/$FILE_NAME\", \"url\": \"$URL\"}" "$API_SAVEURL_URL" 2> /dev/null - check_http_response - - JOB_ID=$(sed -n 's/.*"async_job_id": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - if [[ $JOB_ID == "" ]]; then - print " > Error getting the job id\n" - return - fi - - #Checking the status - while (true); do + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"async_job_id\": \"$job_id\"}" "$API_SAVEURL_JOBSTATUS_URL" 2>/dev/null + check_http_response $? - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"async_job_id\": \"$JOB_ID\"}" "$API_SAVEURL_JOBSTATUS_URL" 2> /dev/null - check_http_response + status=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + case $status in - STATUS=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - case $STATUS in + in_progress) + print "+" + ;; - in_progress) - print "+" - ;; + complete) + print " DONE\n" + break + ;; - complete) - print " DONE\n" - break - ;; + failed) + print " ERROR\n" + message=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + print " > Error: $message\n" + break + ;; - failed) - print " ERROR\n" - MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - print " > Error: $MESSAGE\n" - break - ;; - - esac + esac - sleep 2 + sleep 2 - done + done } #Prints account info -function db_account_info -{ - print "Dropbox Uploader v$VERSION\n\n" - print " > Getting info... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_INFO_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - - name=$(sed -n 's/.*"display_name": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo -e "\n\nName:\t\t$name" - - uid=$(sed -n 's/.*"account_id": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo -e "UID:\t\t$uid" - - email=$(sed -n 's/.*"email": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo -e "Email:\t\t$email" - - country=$(sed -n 's/.*"country": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo -e "Country:\t$country" - - echo "" - - else - print "FAILED\n" - ERROR_STATUS=1 - fi +db_account_info() { + print "Dropbox Uploader v$VERSION\n\n" + print " > Getting info... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_INFO_URL" 2>/dev/null + check_http_response $? 
+ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local name uid email country + name=$(sed -n 's/.*"display_name": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo -e "\n\nName:\t\t$name" + + uid=$(sed -n 's/.*"account_id": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo -e "UID:\t\t$uid" + + email=$(sed -n 's/.*"email": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo -e "Email:\t\t$email" + + country=$(sed -n 's/.*"country": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo -e "Country:\t$country" + + echo "" + + else + print "FAILED\n" + ERROR_STATUS=1 + fi } #Prints account space usage info -function db_account_space -{ - print "Dropbox Uploader v$VERSION\n\n" - print " > Getting space usage info... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_SPACE_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - - quota=$(sed -n 's/.*"allocated": \([0-9]*\).*/\1/p' "$RESPONSE_FILE") - let quota_mb=$quota/1024/1024 - echo -e "\n\nQuota:\t$quota_mb Mb" - - used=$(sed -n 's/.*"used": \([0-9]*\).*/\1/p' "$RESPONSE_FILE") - let used_mb=$used/1024/1024 - echo -e "Used:\t$used_mb Mb" - - let free_mb=$((quota-used))/1024/1024 - echo -e "Free:\t$free_mb Mb" +db_account_space() { + print "Dropbox Uploader v$VERSION\n\n" + print " > Getting space usage info... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" "$API_ACCOUNT_SPACE_URL" 2>/dev/null + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local quota quota_mb used used_mb free_mb + quota=$(sed -n 's/.*"allocated": \([0-9]*\).*/\1/p' "$RESPONSE_FILE") + ((quota_mb = quota / 1024 / 1024)) + echo -e "\n\nQuota:\t$quota_mb Mb" + + used=$(sed -n 's/.*"used": \([0-9]*\).*/\1/p' "$RESPONSE_FILE") + ((used_mb = used / 1024 / 1024)) + echo -e "Used:\t$used_mb Mb" + + ((free_mb = $((quota - used)) / 1024 / 1024)) + echo -e "Free:\t$free_mb Mb" + + echo "" + + else + print "FAILED\n" + ERROR_STATUS=1 + fi +} - echo "" +#Account unlink +db_unlink() { + local answer + echo -ne "Are you sure you want unlink this script from your Dropbox account? [y/n]" + read -r answer + if [[ $answer == "y" ]]; then + rm -fr "$CONFIG_FILE" + echo -ne "DONE\n" + fi +} - else - print "FAILED\n" - ERROR_STATUS=1 - fi +get_status_message() { + local retval + retval="$1" + if [[ "${retval}" -eq 0 ]]; then + echo " [ OK ]" + else + echo " [ FAILED ]" + exit 1 + fi } -#Account unlink -function db_unlink -{ - echo -ne "Are you sure you want unlink this script from your Dropbox account? [y/n]" - read -r answer - if [[ $answer == "y" ]]; then - rm -fr "$CONFIG_FILE" - echo -ne "DONE\n" - fi +#Account test +db_test() { + local temp_file=/tmp/dropbox_uploader_test_file + touch "${temp_file}" + echo "This is a dropbox_uploader test file" >${temp_file} + echo "" + echo "Test file uploading ..." + db_upload /tmp/dropbox_uploader_test_file /dropbox_uploader/dropbox_uploader_test_file + get_status_message $? + echo "" + echo "Test file sharing ..." + db_share /dropbox_uploader/dropbox_uploader_test_file + get_status_message $? + echo "" + echo "Test file deleting ..." + db_delete /dropbox_uploader/dropbox_uploader_test_file + get_status_message $? 
+ echo "" + echo -ne "Removing local test file ...\n" + rm -f "/tmp/dropbox_uploader_test_file" + echo "" } #Delete a remote file #$1 = Remote file to delete -function db_delete -{ - local FILE_DST=$(normalize_path "$1") - - print " > Deleting \"$FILE_DST\"... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\"}" "$API_DELETE_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - ERROR_STATUS=1 - fi +db_delete() { + local file_dst + file_dst=$(normalize_path "$1") + + print " > Deleting \"$file_dst\"... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$file_dst\"}" "$API_DELETE_URL" 2>/dev/null + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + ERROR_STATUS=1 + return 1 + fi } +show_version(){ + echo -e "Dropbox Uploader v$VERSION" +} #Move/Rename a remote file #$1 = Remote file to rename or move #$2 = New file name or location -function db_move -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - TYPE=$(db_stat "$FILE_DST") - - #If the destination it's a directory, the source will be moved into it - if [[ $TYPE == "DIR" ]]; then - local filename=$(basename "$FILE_SRC") - FILE_DST=$(normalize_path "$FILE_DST/$filename") - fi - - print " > Moving \"$FILE_SRC\" to \"$FILE_DST\" ... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_MOVE_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - ERROR_STATUS=1 - fi +db_move() { + local file_src file_dst type + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + + type=$(db_stat "$file_dst") + + #If the destination it's a directory, the source will be moved into it + if [[ $type == "DIR" ]]; then + local filename + filename=$(basename "$file_src") + file_dst=$(normalize_path "$file_dst/$filename") + fi + + print " > Moving \"$file_src\" to \"$file_dst\" ... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$file_src\", \"to_path\": \"$file_dst\"}" "$API_MOVE_URL" 2>/dev/null + check_http_response $? 
+ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + ERROR_STATUS=1 + return 1 + fi } #Copy a remote file to a remote location #$1 = Remote file to rename or move #$2 = New file name or location -function db_copy -{ - local FILE_SRC=$(normalize_path "$1") - local FILE_DST=$(normalize_path "$2") - - TYPE=$(db_stat "$FILE_DST") - - #If the destination it's a directory, the source will be copied into it - if [[ $TYPE == "DIR" ]]; then - local filename=$(basename "$FILE_SRC") - FILE_DST=$(normalize_path "$FILE_DST/$filename") - fi - - print " > Copying \"$FILE_SRC\" to \"$FILE_DST\" ... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$FILE_SRC\", \"to_path\": \"$FILE_DST\"}" "$API_COPY_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - ERROR_STATUS=1 - fi +db_copy() { + local file_src file_dst type + + file_src=$(normalize_path "$1") + file_dst=$(normalize_path "$2") + type=$(db_stat "$file_dst") + + #If the destination it's a directory, the source will be copied into it + if [[ $type == "DIR" ]]; then + local filename + filename=$(basename "$file_src") + file_dst=$(normalize_path "$file_dst/$filename") + fi + + print " > Copying \"$file_src\" to \"$file_dst\" ... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"from_path\": \"$file_src\", \"to_path\": \"$file_dst\"}" "$API_COPY_URL" 2>/dev/null + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + ERROR_STATUS=1 + return 1 + fi } #Create a new directory #$1 = Remote directory to create -function db_mkdir -{ - local DIR_DST=$(normalize_path "$1") - - print " > Creating Directory \"$DIR_DST\"... " - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\"}" "$API_MKDIR_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - elif grep -q "{\"error_summary\": \"path/conflict/folder/" "$RESPONSE_FILE"; then - print "ALREADY EXISTS\n" - else - print "FAILED\n" - ERROR_STATUS=1 - fi +db_mkdir() { + local dir_dst + dir_dst=$(normalize_path "$1") + + print " > Creating Directory \"$dir_dst\"... " + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$dir_dst\"}" "$API_MKDIR_URL" 2>/dev/null + check_http_response $? 
+ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + elif grep -q "{\"error_summary\": \"path/conflict/folder/" "$RESPONSE_FILE"; then + print "ALREADY EXISTS\n" + else + print "FAILED\n" + ERROR_STATUS=1 + return 1 + fi } #List a remote folder and returns the path to the file containing the output #$1 = Remote directory #$2 = Cursor (Optional) -function db_list_outfile -{ +db_list_outfile() { + local dir_dst has_more cursor out_file - local DIR_DST="$1" - local HAS_MORE="false" - local CURSOR="" + dir_dst="$1" + has_more="false" + cursor="" - if [[ -n "$2" ]]; then - CURSOR="$2" - HAS_MORE="true" - fi - - OUT_FILE="$TMP_DIR/du_tmp_out_$RANDOM" + if [[ -n "$2" ]]; then + cursor="$2" + has_more="true" + fi - while (true); do + out_file="$TMP_DIR/du_tmp_out_$RANDOM" - if [[ $HAS_MORE == "true" ]]; then - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"cursor\": \"$CURSOR\"}" "$API_LIST_FOLDER_CONTINUE_URL" 2> /dev/null - else - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2> /dev/null - fi + while (true); do - check_http_response + if [[ $has_more == "true" ]]; then + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"cursor\": \"$cursor\"}" "$API_LIST_FOLDER_CONTINUE_URL" 2>/dev/null + else + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$dir_dst\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2>/dev/null + fi - HAS_MORE=$(sed -n 's/.*"has_more": *\([a-z]*\).*/\1/p' "$RESPONSE_FILE") - CURSOR=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE") + check_http_response $? - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + has_more=$(sed -n 's/.*"has_more": *\([a-z]*\).*/\1/p' "$RESPONSE_FILE") + cursor=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE") - #Extracting directory content [...] - #and replacing "}, {" with "}\n{" - #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code... - local DIR_CONTENT=$(sed -n 's/.*: \[{\(.*\)/\1/p' "$RESPONSE_FILE" | sed 's/}, *{/}\ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local dir_content + #Extracting directory content [...] + #and replacing "}, {" with "}\n{" + #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code... 
+ dir_content=$(sed -n 's/.*: \[{\(.*\)/\1/p' "$RESPONSE_FILE" | sed 's/}, *{/}\ {/g') - #Converting escaped quotes to unicode format - echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE" - - #Extracting files and subfolders - while read -r line; do + #Converting escaped quotes to unicode format + echo "$dir_content" | sed 's/\\"/\\u0022/' >"$TEMP_FILE" - local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p') - local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p') - local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p')) + #Extracting files and subfolders + while read -r line; do + local file type size + file=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p') + type=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p') + size=$(convert_bytes "$(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p')") - echo -e "$FILE:$TYPE;$SIZE" >> "$OUT_FILE" + echo -e "$file:$type;$size" >>"$out_file" - done < "$TEMP_FILE" + done <"$TEMP_FILE" - if [[ $HAS_MORE == "false" ]]; then - break - fi + if [[ $has_more == "false" ]]; then + break + fi - else - return - fi + else + return + fi - done + done - echo $OUT_FILE + echo "$out_file" } #List remote directory #$1 = Remote directory -function db_list -{ - local DIR_DST=$(normalize_path "$1") - - print " > Listing \"$DIR_DST\"... " - - if [[ "$DIR_DST" == "/" ]]; then - DIR_DST="" +db_list() { + local dir_dst out_file padding + + dir_dst=$(normalize_path "$1") + + print " > Listing \"$dir_dst\"... " + + if [[ "$dir_dst" == "/" ]]; then + dir_dst="" + fi + + out_file=$(db_list_outfile "$dir_dst") + if [ -z "$out_file" ]; then + print "FAILED\n" + ERROR_STATUS=1 + return 1 + else + print "DONE\n" + fi + + #Looking for the biggest file size + #to calculate the padding to use + padding=0 + while read -r line; do + local file meta size + file=${line%:*} + meta=${line##*:} + size=${meta#*;} + + if [[ ${#size} -gt $padding ]]; then + padding=${#size} fi - - OUT_FILE=$(db_list_outfile "$DIR_DST") - if [ -z "$OUT_FILE" ]; then - print "FAILED\n" - ERROR_STATUS=1 - return - else - print "DONE\n" + done <"$out_file" + + #For each entry, printing directories... + while read -r line; do + local file meta type size + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} + + #Removing unneeded / + file=${file##*/} + + if [[ $type == "folder" ]]; then + file=$(echo -e "$file") + $PRINTF " [D] %-${padding}s %s\n" "$size" "$file" fi - #Looking for the biggest file size - #to calculate the padding to use - local padding=0 - while read -r line; do - local FILE=${line%:*} - local META=${line##*:} - local SIZE=${META#*;} - - if [[ ${#SIZE} -gt $padding ]]; then - padding=${#SIZE} - fi - done < "$OUT_FILE" - - #For each entry, printing directories... - while read -r line; do - - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} - - #Removing unneeded / - FILE=${FILE##*/} - - if [[ $TYPE == "folder" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE" - fi - - done < "$OUT_FILE" - - #For each entry, printing files... - while read -r line; do + done <"$out_file" - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} + #For each entry, printing files... 
+ while read -r line; do + local file meta type size + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} - #Removing unneeded / - FILE=${FILE##*/} + #Removing unneeded / + file=${file##*/} - if [[ $TYPE == "file" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE" - fi + if [[ $type == "file" ]]; then + file=$(echo -e "$file") + $PRINTF " [F] %-${padding}s %s\n" "$size" "$file" + fi - done < "$OUT_FILE" + done <"$out_file" - rm -fr "$OUT_FILE" + rm -fr "$out_file" } #Longpoll remote directory only once #$1 = Timeout #$2 = Remote directory -function db_monitor_nonblock -{ - local TIMEOUT=$1 - local DIR_DST=$(normalize_path "$2") +db_monitor_nonblock() { + local timeout dir_dst + timeout=$1 + dir_dst=$(normalize_path "$2") - if [[ "$DIR_DST" == "/" ]]; then - DIR_DST="" - fi + if [[ "$dir_dst" == "/" ]]; then + dir_dst="" + fi - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$DIR_DST\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2> /dev/null - check_http_response - - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - - local CURSOR=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE") - - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Content-Type: application/json" --data "{\"cursor\": \"$CURSOR\",\"timeout\": ${TIMEOUT}}" "$API_LONGPOLL_FOLDER" 2> /dev/null - check_http_response - - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - local CHANGES=$(sed -n 's/.*"changes" *: *\([a-z]*\).*/\1/p' "$RESPONSE_FILE") - else - ERROR_MSG=$(grep "Error in call" "$RESPONSE_FILE") - print "FAILED to longpoll (http error): $ERROR_MSG\n" - ERROR_STATUS=1 - return 1 - fi - - if [[ -z "$CHANGES" ]]; then - print "FAILED to longpoll (unexpected response)\n" - ERROR_STATUS=1 - return 1 - fi + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$dir_dst\",\"include_media_info\": false,\"include_deleted\": false,\"include_has_explicit_shared_members\": false}" "$API_LIST_FOLDER_URL" 2>/dev/null + check_http_response $? - if [ "$CHANGES" == "true" ]; then + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local cursor changes + cursor=$(sed -n 's/.*"cursor": *"\([^"]*\)".*/\1/p' "$RESPONSE_FILE") - OUT_FILE=$(db_list_outfile "$DIR_DST" "$CURSOR") - - if [ -z "$OUT_FILE" ]; then - print "FAILED to list changes\n" - ERROR_STATUS=1 - return - fi + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Content-Type: application/json" --data "{\"cursor\": \"$cursor\",\"timeout\": ${timeout}}" "$API_LONGPOLL_FOLDER" 2>/dev/null + check_http_response $? - #For each entry, printing directories... 
- while read -r line; do + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + changes=$(sed -n 's/.*"changes" *: *\([a-z]*\).*/\1/p' "$RESPONSE_FILE") + else + local error_msg + error_msg=$(grep "Error in call" "$RESPONSE_FILE") + print "FAILED to longpoll (http error): $error_msg\n" + ERROR_STATUS=1 + return 1 + fi - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} + # NOTE: This code will never check, the variable CHANGES should be in upper function scope + if [[ -z "$changes" ]]; then + print "FAILED to longpoll (unexpected response)\n" + ERROR_STATUS=1 + return 1 + fi - #Removing unneeded / - FILE=${FILE##*/} + if [ "$changes" == "true" ]; then + local out_file + out_file=$(db_list_outfile "$dir_dst" "$cursor") - if [[ $TYPE == "folder" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [D] %s\n" "$FILE" - elif [[ $TYPE == "file" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [F] %s %s\n" "$SIZE" "$FILE" - elif [[ $TYPE == "deleted" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [-] %s\n" "$FILE" - fi + if [ -z "$out_file" ]; then + print "FAILED to list changes\n" + ERROR_STATUS=1 + return 1 + fi - done < "$OUT_FILE" + #For each entry, printing directories... + while read -r line; do + local file meta type size + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} - rm -fr "$OUT_FILE" + #Removing unneeded / + file=${file##*/} + + if [[ $type == "folder" ]]; then + file=$(echo -e "$file") + $PRINTF " [D] %s\n" "$file" + elif [[ $type == "file" ]]; then + file=$(echo -e "$file") + $PRINTF " [F] %s %s\n" "$size" "$file" + elif [[ $type == "deleted" ]]; then + file=$(echo -e "$file") + $PRINTF " [-] %s\n" "$file" fi - else - ERROR_STATUS=1 - return 1 + done <"$out_file" + + rm -fr "$out_file" fi + else + ERROR_STATUS=1 + return 1 + fi + } #Longpoll continuously remote directory #$1 = Timeout #$2 = Remote directory -function db_monitor -{ - local TIMEOUT=$1 - local DIR_DST=$(normalize_path "$2") - - while (true); do - db_monitor_nonblock "$TIMEOUT" "$2" - done +db_monitor() { + local timeout dir_dst + timeout=$1 + dir_dst=$(normalize_path "$2") + + while (true); do + db_monitor_nonblock "$timeout" "$2" + done } #Share remote file #$1 = Remote file -function db_share -{ - local FILE_DST=$(normalize_path "$1") - - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\",\"settings\": {\"requested_visibility\": \"public\"}}" "$API_SHARE_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print " > Share link: " - SHARE_LINK=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo "$SHARE_LINK" - else - get_Share "$FILE_DST" - fi +db_share() { + local file_dst + file_dst=$(normalize_path "$1") + + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$file_dst\",\"settings\": {\"requested_visibility\": \"public\"}}" "$API_SHARE_URL" 2>/dev/null + check_http_response $? 
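+    #On success the share endpoint returns JSON containing a "url" field, e.g.
+    #  {"url": "https://www.dropbox.com/s/xxxxxxxx/file.txt?dl=0", ...}  (illustrative values)
+    #If a shared link already exists, Dropbox reports an error instead, which is why
+    #the failure branch below falls back to get_Share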
+ + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local share_link + print " > Share link: " + share_link=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo "$share_link" + else + get_Share "$file_dst" + fi } #Query existing shared link #$1 = Remote file -function get_Share -{ - local FILE_DST=$(normalize_path "$1") - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE_DST\",\"direct_only\": true}" "$API_SHARE_LIST" - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print " > Share link: " - SHARE_LINK=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo "$SHARE_LINK" - else - print "FAILED\n" - MESSAGE=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - print " > Error: $MESSAGE\n" - ERROR_STATUS=1 - fi +get_Share() { + local file_dst + file_dst=$(normalize_path "$1") + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$file_dst\",\"direct_only\": true}" "$API_SHARE_LIST" + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + local share_link + print " > Share link: " + share_link=$(sed -n 's/.*"url": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo "$share_link" + else + local message + print "FAILED\n" + message=$(sed -n 's/.*"error_summary": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + print " > Error: $message\n" + ERROR_STATUS=1 + return 1 + fi } #Search on Dropbox #$1 = query -function db_search -{ - local QUERY="$1" - - print " > Searching for \"$QUERY\"... " - - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"\",\"query\": \"$QUERY\",\"start\": 0,\"max_results\": 1000,\"mode\": \"filename\"}" "$API_SEARCH_URL" 2> /dev/null - check_http_response - - #Check - if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then - print "DONE\n" - else - print "FAILED\n" - ERROR_STATUS=1 - fi - - #Extracting directory content [...] - #and replacing "}, {" with "}\n{" - #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code... - local DIR_CONTENT=$(sed 's/}, *{/}\ +db_search() { + local query dir_content + query="$1" + + print " > Searching for \"$query\"... " + + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"\",\"query\": \"$query\",\"start\": 0,\"max_results\": 1000,\"mode\": \"filename\"}" "$API_SEARCH_URL" 2>/dev/null + check_http_response $? + + #Check + if grep -q "^HTTP/[12].* 200" "$RESPONSE_FILE"; then + print "DONE\n" + else + print "FAILED\n" + ERROR_STATUS=1 + fi + + #Extracting directory content [...] + #and replacing "}, {" with "}\n{" + #I don't like this piece of code... but seems to be the only way to do this with SED, writing a portable code... 
+ dir_content=$(sed 's/}, *{/}\ {/g' "$RESPONSE_FILE") - #Converting escaped quotes to unicode format - echo "$DIR_CONTENT" | sed 's/\\"/\\u0022/' > "$TEMP_FILE" - - #Extracting files and subfolders - rm -fr "$RESPONSE_FILE" - while read -r line; do + #Converting escaped quotes to unicode format + echo "$dir_content" | sed 's/\\"/\\u0022/' >"$TEMP_FILE" - local FILE=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p') - local TYPE=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p') - local SIZE=$(convert_bytes $(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p')) + #Extracting files and subfolders + rm -fr "$RESPONSE_FILE" + while read -r line; do + local file type size + file=$(echo "$line" | sed -n 's/.*"path_display": *"\([^"]*\)".*/\1/p') + type=$(echo "$line" | sed -n 's/.*".tag": *"\([^"]*\).*/\1/p') + size=$(convert_bytes "$(echo "$line" | sed -n 's/.*"size": *\([0-9]*\).*/\1/p')") - echo -e "$FILE:$TYPE;$SIZE" >> "$RESPONSE_FILE" + echo -e "$file:$type;$size" >>"$RESPONSE_FILE" - done < "$TEMP_FILE" - - #Looking for the biggest file size - #to calculate the padding to use - local padding=0 - while read -r line; do - local FILE=${line%:*} - local META=${line##*:} - local SIZE=${META#*;} - - if [[ ${#SIZE} -gt $padding ]]; then - padding=${#SIZE} - fi - done < "$RESPONSE_FILE" - - #For each entry, printing directories... - while read -r line; do + done <"$TEMP_FILE" - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} + #Looking for the biggest file size + #to calculate the padding to use + local padding=0 + while read -r line; do + local file meta size + file=${line%:*} + meta=${line##*:} + size=${meta#*;} - if [[ $TYPE == "folder" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [D] %-${padding}s %s\n" "$SIZE" "$FILE" - fi + if [[ ${#size} -gt $padding ]]; then + padding=${#size} + fi + done <"$RESPONSE_FILE" + + #For each entry, printing directories... + while read -r line; do + local file meta type size + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} + + if [[ $type == "folder" ]]; then + file=$(echo -e "$file") + $PRINTF " [D] %-${padding}s %s\n" "$size" "$file" + fi - done < "$RESPONSE_FILE" + done <"$RESPONSE_FILE" - #For each entry, printing files... - while read -r line; do + #For each entry, printing files... 
+ while read -r line; do + local file meta type size - local FILE=${line%:*} - local META=${line##*:} - local TYPE=${META%;*} - local SIZE=${META#*;} + file=${line%:*} + meta=${line##*:} + type=${meta%;*} + size=${meta#*;} - if [[ $TYPE == "file" ]]; then - FILE=$(echo -e "$FILE") - $PRINTF " [F] %-${padding}s %s\n" "$SIZE" "$FILE" - fi + if [[ $type == "file" ]]; then + file=$(echo -e "$file") + $PRINTF " [F] %-${padding}s %s\n" "$size" "$file" + fi - done < "$RESPONSE_FILE" + done <"$RESPONSE_FILE" } #Query the sha256-dropbox-sum of a remote file #see https://www.dropbox.com/developers/reference/content-hash for more information #$1 = Remote file -function db_sha -{ - local FILE=$(normalize_path "$1") - - if [[ $FILE == "/" ]]; then - echo "ERR" - return - fi - - #Checking if it's a file or a directory and get the sha-sum - ensure_accesstoken - $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-Type: application/json" --data "{\"path\": \"$FILE\"}" "$API_METADATA_URL" 2> /dev/null - check_http_response - - local TYPE=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") - if [[ $TYPE == "folder" ]]; then - echo "ERR" - return - fi - - local SHA256=$(sed -n 's/.*"content_hash": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") - echo "$SHA256" +db_sha() { + local file type sha256 + file=$(normalize_path "$1") + + if [[ $file == "/" ]]; then + echo "ERR" + return + fi + + #Checking if it's a file or a directory and get the sha-sum + ensure_accesstoken + $CURL_BIN $CURL_ACCEPT_CERTIFICATES -X POST -L -s --show-error --globoff -i -o "$RESPONSE_FILE" --header "Authorization: Bearer $OAUTH_ACCESS_TOKEN" --header "Content-type: application/json" --data "{\"path\": \"$file\"}" "$API_METADATA_URL" 2>/dev/null + check_http_response $? + + type=$(sed -n 's/{".tag": *"*\([^"]*\)"*.*/\1/p' "$RESPONSE_FILE") + if [[ $type == "folder" ]]; then + echo "ERR" + return + fi + + sha256=$(sed -n 's/.*"content_hash": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + echo "$sha256" } #Query the sha256-dropbox-sum of a local file #see https://www.dropbox.com/developers/reference/content-hash for more information #$1 = Local file -function db_sha_local -{ - local FILE=$(normalize_path "$1") - local FILE_SIZE=$(file_size "$FILE") - local OFFSET=0 - local SKIP=0 - local SHA_CONCAT="" - - which shasum > /dev/null - if [[ $? != 0 ]]; then - echo "ERR" - return - fi - - while ([[ $OFFSET -lt "$FILE_SIZE" ]]); do - dd if="$FILE" of="$CHUNK_FILE" bs=4194304 skip=$SKIP count=1 2> /dev/null - local SHA=$(shasum -a 256 "$CHUNK_FILE" | awk '{print $1}') - SHA_CONCAT="${SHA_CONCAT}${SHA}" - - let OFFSET=$OFFSET+4194304 - let SKIP=$SKIP+1 - done - - if [[ "`uname -s`" = "Darwin" ]] - then - # sed for macOS will give an error "bad flag in substitute command: 'I'" - # when using the original syntax. This option works instead. - shaHex=$(echo $SHA_CONCAT | sed 's/\([0-9A-Fa-f]\{2\}\)/\\x\1/g') - else - shaHex=$(echo $SHA_CONCAT | sed 's/\([0-9A-F]\{2\}\)/\\x\1/gI') - fi - - echo -ne $shaHex | shasum -a 256 | awk '{print $1}' +db_sha_local() { + local file file_size offset skip sha_concat sha sha_hex + file=$(normalize_path "$1") + file_size=$(file_size "$file") + offset=0 + skip=0 + sha_concat="" + + which shasum >/dev/null + if [[ $? 
-ne 0 ]]; then + echo "ERR" + return + fi + + while [[ $offset -lt "$file_size" ]]; do + dd if="$file" of="$CHUNK_FILE" bs=4194304 skip=$skip count=1 2>/dev/null + sha=$(shasum -a 256 "$CHUNK_FILE" | awk '{print $1}') + sha_concat="${sha_concat}${sha}" + + ((offset = offset + 4194304)) + ((skip = skip + 1)) + done + + if [[ "$(uname -s)" == "Darwin" ]]; then + # sed for macOS will give an error "bad flag in substitute command: 'I'" + # when using the original syntax. This option works instead. + sha_hex=$(echo "$sha_concat" | sed 's/\([0-9A-Fa-f]\{2\}\)/\\x\1/g') + else + sha_hex=$(echo "$sha_concat" | sed 's/\([0-9A-F]\{2\}\)/\\x\1/gI') + fi + + echo -ne "$sha_hex" | shasum -a 256 | awk '{print $1}' } + ################ #### SETUP #### ################ @@ -1563,77 +1609,88 @@ function db_sha_local #CHECKING FOR AUTH FILE if [[ -e $CONFIG_FILE ]]; then + # Test the config file + TEST_SOURCE="$(source "${CONFIG_FILE}" 2>&1 >/dev/null)" + if [[ $? -eq 0 ]]; then #Loading data... source "$CONFIG_FILE" 2>/dev/null + else + echo -ne "Error, config file ${CONFIG_FILE} contains invalid syntax\n" + echo -ne "Error details:\n" + echo "${TEST_SOURCE}" + mv "$CONFIG_FILE" "$CONFIG_FILE".old + exit 1 + fi - #Checking if it's still a v1 API configuration file - if [[ $CONFIGFILE_VERSION != "2.0" ]]; then - echo -ne "The config file contains the old deprecated v1 or v2 oauth tokens.\n" - echo -ne "Please run again the script and follow the configuration wizard. The old configuration file has been backed up to $CONFIG_FILE.old\n" - mv "$CONFIG_FILE" "$CONFIG_FILE".old - exit 1 - fi + #Checking if it's still a v1 API configuration file + if [[ $CONFIGFILE_VERSION != "2.0" ]]; then + echo -ne "The config file contains the old deprecated v1 or v2 oauth tokens.\n" + echo -ne "Please run again the script and follow the configuration wizard. The old configuration file has been backed up to $CONFIG_FILE.old\n" + mv "$CONFIG_FILE" "$CONFIG_FILE".old + exit 1 + fi - #Checking loaded data - if [[ $OAUTH_APP_KEY = "" || $OAUTH_APP_SECRET = "" || $OAUTH_REFRESH_TOKEN = "" ]]; then - echo -ne "Error loading data from $CONFIG_FILE...\n" - echo -ne "It is recommended to run $0 unlink\n" - remove_temp_files - exit 1 - fi + #Checking loaded data + if [[ $OAUTH_APP_KEY == "" || $OAUTH_APP_SECRET == "" || $OAUTH_REFRESH_TOKEN == "" ]]; then + echo -ne "Error loading data from $CONFIG_FILE...\n" + echo -ne "It is recommended to run $0 unlink or manually remove config file at $CONFIG_FILE\n" + remove_temp_files + exit 1 + fi #NEW SETUP... else - echo -ne "\n This is the first time you run this script, please follow the instructions:\n\n" - echo -ne "(note: Dropbox will change their API on 2021-09-30.\n" - echo -ne "When using dropbox_uploader.sh configured in the past with the old API, have a look at README.md, before continue.)\n\n" - echo -ne " 1) Open the following URL in your Browser, and log in using your account: $APP_CREATE_URL\n" - echo -ne " 2) Click on \"Create App\", then select \"Choose an API: Scoped Access\"\n" - echo -ne " 3) \"Choose the type of access you need: App folder\"\n" - echo -ne " 4) Enter the \"App Name\" that you prefer (e.g. 
MyUploader$RANDOM$RANDOM$RANDOM), must be unique\n\n"
-    echo -ne "      Now, click on the \"Create App\" button.\n\n"
-    echo -ne " 5) Now the new configuration is opened, switch to tab \"permissions\" and check \"files.metadata.read/write\" and \"files.content.read/write\"\n"
-    echo -ne "      Now, click on the \"Submit\" button.\n\n"
-    echo -ne " 6) Now to tab \"settings\" and provide the following information:\n"
-
-    echo -ne " App key: "
-    read -r OAUTH_APP_KEY
-
-    echo -ne " App secret: "
-    read -r OAUTH_APP_SECRET
-
-    url="${API_OAUTH_AUTHORIZE}?client_id=${OAUTH_APP_KEY}&token_access_type=offline&response_type=code"
-    echo -ne " Open the following URL in your Browser and allow suggested permissions: ${url}\n"
-    echo -ne " Please provide the access code: "
-    read -r access_code
-
-    echo -ne "\n > App key: ${OAUTH_APP_KEY}\n"
-    echo -ne " > App secret: '${OAUTH_APP_SECRET}\n"
-    echo -ne " > Access code: '${access_code}'. Looks ok? [y/N]: "
-    read -r answer
-    if [[ $answer != "y" ]]; then
-        remove_temp_files
-        exit 1
-    fi
-
-    $CURL_BIN $CURL_ACCEPT_CERTIFICATES $API_OAUTH_TOKEN -d code=$access_code -d grant_type=authorization_code -u $OAUTH_APP_KEY:$OAUTH_APP_SECRET -o "$RESPONSE_FILE" 2>/dev/null
-    check_http_response
-    OAUTH_REFRESH_TOKEN=$(sed -n 's/.*"refresh_token": "\([^"]*\).*/\1/p' "$RESPONSE_FILE")
-
-    echo "CONFIGFILE_VERSION=2.0" > "$CONFIG_FILE"
-    echo "OAUTH_APP_KEY=$OAUTH_APP_KEY" >> "$CONFIG_FILE"
-    echo "OAUTH_APP_SECRET=$OAUTH_APP_SECRET" >> "$CONFIG_FILE"
-    echo "OAUTH_REFRESH_TOKEN=$OAUTH_REFRESH_TOKEN" >> "$CONFIG_FILE"
-    echo " The configuration has been saved."
-
+    echo -ne "\n This is the first time you have run this script; please follow the instructions:\n\n"
+    echo -ne "(note: Dropbox changed their API on 2021-09-30.\n"
+    echo -ne "When using dropbox_uploader.sh configured in the past with the old API, have a look at README.md before continuing.)\n\n"
+    echo -ne " 1) Open the following URL in your Browser, and log in using your account: $APP_CREATE_URL\n"
+    echo -ne " 2) Click on \"Create App\", then select \"Choose an API: Scoped Access\"\n"
+    echo -ne " 3) \"Choose the type of access you need: App folder\"\n"
+    echo -ne " 4) Enter the \"App Name\" that you prefer (e.g. MyUploader$RANDOM$RANDOM$RANDOM), must be unique\n\n"
+    echo -ne "      Now, click on the \"Create App\" button.\n\n"
+    echo -ne " 5) Now the new configuration is opened, switch to the \"permissions\" tab and check \"files.metadata.read/write\" and \"files.content.read/write\"\n"
+    echo -ne "      Now, click on the \"Submit\" button.\n\n"
+    echo -ne " 6) Now go to the \"settings\" tab and provide the following information:\n"
+
+    echo -ne " App key: "
+    read -r OAUTH_APP_KEY
+
+    echo -ne " App secret: "
+    read -r OAUTH_APP_SECRET
+
+    URL="${API_OAUTH_AUTHORIZE}?client_id=${OAUTH_APP_KEY}&token_access_type=offline&response_type=code"
+    echo -ne " Open the following URL in your Browser and allow suggested permissions: ${URL}\n"
+    echo -ne " Please provide the access code: "
+    read -r ACCESS_CODE
+
+    echo -ne "\n > App key: '${OAUTH_APP_KEY}'\n"
+    echo -ne " > App secret: '${OAUTH_APP_SECRET}'\n"
+    echo -ne " > Access code: '${ACCESS_CODE}'. Looks ok? [y/N]: "
+    read -r answer
+    if [[ $answer != "y" ]]; then
        remove_temp_files
-        exit 0
+        exit 1
+    fi
+    $CURL_BIN $CURL_ACCEPT_CERTIFICATES $API_OAUTH_TOKEN -d code=$ACCESS_CODE -d grant_type=authorization_code -u $OAUTH_APP_KEY:$OAUTH_APP_SECRET -o "$RESPONSE_FILE" 2>/dev/null
+    check_http_response $?
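+    #The token endpoint should reply with JSON roughly like
+    #  {"access_token": "sl.xxx", "expires_in": 14400, "refresh_token": "xxx", ...}
+    #(values are illustrative). Only the long-lived refresh_token is persisted below;
+    #short-lived access tokens are requested later by ensure_accesstoken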
+ OAUTH_REFRESH_TOKEN=$(sed -n 's/.*"refresh_token": "\([^"]*\).*/\1/p' "$RESPONSE_FILE") + { + echo "CONFIGFILE_VERSION=2.0" + echo "OAUTH_APP_KEY=$OAUTH_APP_KEY" + echo "OAUTH_APP_SECRET=$OAUTH_APP_SECRET" + echo "OAUTH_REFRESH_TOKEN=$OAUTH_REFRESH_TOKEN" + } >"$CONFIG_FILE" + # Test dropbox account for upload, share and delete to work with this script + db_test + echo " The configuration has been saved." + + remove_temp_files + exit 0 fi # GET ACCESS TOKEN ensure_accesstoken - ################ #### START #### ################ @@ -1641,194 +1698,200 @@ ensure_accesstoken COMMAND="${*:$OPTIND:1}" ARG1="${*:$OPTIND+1:1}" ARG2="${*:$OPTIND+2:1}" +ARGS_NUM=$# -let argnum=$#-$OPTIND +((ARGNUM = ARGS_NUM - OPTIND)) #CHECKING PARAMS VALUES case $COMMAND in - upload) +upload) - if [[ $argnum -lt 2 ]]; then - usage - fi + if [[ $ARGNUM -lt 2 ]]; then + usage + fi - FILE_DST="${*:$#:1}" + FILE_DST="${*:$#:1}" + for ((i = OPTIND + 1; i < $#; i++)); do + FILE_SRC="${*:$i:1}" + db_upload "$FILE_SRC" "/$FILE_DST" + done - for (( i=OPTIND+1; i<$#; i++ )); do - FILE_SRC="${*:$i:1}" - db_upload "$FILE_SRC" "/$FILE_DST" - done + ;; - ;; +download) - download) + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - if [[ $argnum -lt 1 ]]; then - usage - fi + FILE_SRC="$ARG1" + FILE_DST="$ARG2" - FILE_SRC="$ARG1" - FILE_DST="$ARG2" + db_download "/$FILE_SRC" "$FILE_DST" - db_download "/$FILE_SRC" "$FILE_DST" + ;; - ;; +saveurl) - saveurl) + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - if [[ $argnum -lt 1 ]]; then - usage - fi + URL=$ARG1 + FILE_DST="$ARG2" - URL=$ARG1 - FILE_DST="$ARG2" + db_saveurl "$URL" "/$FILE_DST" - db_saveurl "$URL" "/$FILE_DST" + ;; - ;; - - share) +share) - if [[ $argnum -lt 1 ]]; then - usage - fi + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - FILE_DST="$ARG1" + FILE_DST="$ARG1" - db_share "/$FILE_DST" + db_share "/$FILE_DST" - ;; + ;; - info) +info) - db_account_info + db_account_info - ;; + ;; - space) +space) - db_account_space + db_account_space - ;; + ;; - delete|remove) +delete | remove) - if [[ $argnum -lt 1 ]]; then - usage - fi + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - FILE_DST="$ARG1" + FILE_DST="$ARG1" - db_delete "/$FILE_DST" + db_delete "/$FILE_DST" - ;; + ;; - move|rename) +move | rename) - if [[ $argnum -lt 2 ]]; then - usage - fi + if [[ $ARGNUM -lt 2 ]]; then + usage + fi - FILE_SRC="$ARG1" - FILE_DST="$ARG2" + FILE_SRC="$ARG1" + FILE_DST="$ARG2" - db_move "/$FILE_SRC" "/$FILE_DST" + db_move "/$FILE_SRC" "/$FILE_DST" - ;; + ;; - copy) +copy) - if [[ $argnum -lt 2 ]]; then - usage - fi + if [[ $ARGNUM -lt 2 ]]; then + usage + fi - FILE_SRC="$ARG1" - FILE_DST="$ARG2" + FILE_SRC="$ARG1" + FILE_DST="$ARG2" - db_copy "/$FILE_SRC" "/$FILE_DST" + db_copy "/$FILE_SRC" "/$FILE_DST" - ;; + ;; - mkdir) +mkdir) - if [[ $argnum -lt 1 ]]; then - usage - fi + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - DIR_DST="$ARG1" + DIR_DST="$ARG1" - db_mkdir "/$DIR_DST" + db_mkdir "/$DIR_DST" - ;; + ;; - search) +search) - if [[ $argnum -lt 1 ]]; then - usage - fi + if [[ $ARGNUM -lt 1 ]]; then + usage + fi - QUERY=$ARG1 + QUERY=$ARG1 - db_search "$QUERY" + db_search "$QUERY" - ;; + ;; - list) +list) - DIR_DST="$ARG1" + DIR_DST="$ARG1" - #Checking DIR_DST - if [[ $DIR_DST == "" ]]; then - DIR_DST="/" - fi + #Checking DIR_DST + if [[ $DIR_DST == "" ]]; then + DIR_DST="/" + fi - db_list "/$DIR_DST" + db_list "/$DIR_DST" - ;; + ;; - monitor) +monitor) - DIR_DST="$ARG1" - TIMEOUT=$ARG2 + DIR_DST="$ARG1" + TIMEOUT=$ARG2 - #Checking DIR_DST - if [[ $DIR_DST == "" ]]; then - DIR_DST="/" - fi + #Checking 
DIR_DST
+    if [[ $DIR_DST == "" ]]; then
+        DIR_DST="/"
+    fi

-        print " > Monitoring \"$DIR_DST\" for changes...\n"
+    print " > Monitoring \"$DIR_DST\" for changes...\n"

-        if [[ -n $TIMEOUT ]]; then
-            db_monitor_nonblock $TIMEOUT "/$DIR_DST"
-        else
-            db_monitor 60 "/$DIR_DST"
-        fi
+    if [[ -n $TIMEOUT ]]; then
+        db_monitor_nonblock "$TIMEOUT" "/$DIR_DST"
+    else
+        db_monitor 60 "/$DIR_DST"
+    fi

-    ;;
+    ;;

-    unlink)
+unlink)

-        db_unlink
+    db_unlink

-    ;;
+    ;;

-    *)
+test)
+    db_test
+    ;;
+version)
+    show_version
+    ;;
+*)

-        if [[ $COMMAND != "" ]]; then
-            print "Error: Unknown command: $COMMAND\n\n"
-            ERROR_STATUS=1
-        fi
-        usage
+    if [[ $COMMAND != "" ]]; then
+        print "Error: Unknown command: $COMMAND\n\n"
+        ERROR_STATUS=1
+    fi
+    usage

-    ;;
+    ;;

 esac

 remove_temp_files

 if [[ $ERROR_STATUS -ne 0 ]]; then
-    echo "Some error occured. rerun the script with \"-d\" option and check the output and logfile: $RESPONSE_FILE."
+    echo "An error occurred. Rerun the script with the \"-d\" option and check the output and logfile: $RESPONSE_FILE."
 fi

 exit $ERROR_STATUS