tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

mbsdiff_hook.sh (4771B)


      1 #!/bin/bash
      2 # This Source Code Form is subject to the terms of the Mozilla Public
      3 # License, v. 2.0. If a copy of the MPL was not distributed with this
      4 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
      5 
      6 #
      7 # This tool contains functions that are to be used to handle/enable funsize
      8 # Author: Mihai Tabara
      9 #
     10 
     11 HOOK=
     12 AWS_BUCKET_NAME=
     13 LOCAL_CACHE_DIR=
     14 
     15 # Don't cache files smaller than this, as it's slower with S3
     16 # Bug 1437473
     17 CACHE_THRESHOLD=500000
     18 
     19 S3_CACHE_HITS=0
     20 S3_CACHE_MISSES=0
     21 
     22 getsha512(){
     23    openssl sha512 "${1}" | awk '{print $2}'
     24 }
     25 
     26 print_usage(){
     27    echo "$(basename "$0") [-S S3-BUCKET-NAME] [-c LOCAL-CACHE-DIR-PATH] [-g] [-u] PATH-FROM-URL PATH-TO-URL PATH-PATCH"
     28    echo "Script that saves/retrieves from cache presumptive patches as args"
     29    echo ""
     30    echo "-A SERVER-URL - host where to send the files"
     31    echo "-c LOCAL-CACHE-DIR-PATH local path to which patches are cached"
     32    echo "-g pre hook - tests whether patch already in cache"
     33    echo "-u post hook - upload patch to cache for future use"
     34    echo ""
     35    echo "PATH-FROM-URL     : path on disk for source file"
     36    echo "PATH-TO-URL       : path on disk for destination file"
     37    echo "PATH-PATCH        : path on disk for patch between source and destination"
     38 }
     39 
     40 upload_patch(){
     41    if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
     42    then
     43      return 0
     44    fi
     45    sha_from=$(getsha512 "$1")
     46    sha_to=$(getsha512 "$2")
     47    patch_path="$3"
     48    patch_filename="$(basename "$3")"
     49 
     50    # save to local cache first
     51    if [ -n "$LOCAL_CACHE_DIR" ]; then
     52        local_cmd="mkdir -p "$LOCAL_CACHE_DIR/$sha_from""
     53        if $local_cmd >&2; then
     54            cp -avf "${patch_path}" "$LOCAL_CACHE_DIR/$sha_from/$sha_to"
     55            echo "${patch_path} saved on local cache."
     56        fi
     57    fi
     58 
     59    if [ -n "${AWS_BUCKET_NAME}" ]; then
     60        BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${patch_filename}"
     61        if aws s3 cp "${patch_path}" "${BUCKET_PATH}"; then
     62            echo "${patch_path} saved on s://${AWS_BUCKET_NAME}"
     63            return 0
     64        fi
     65        echo "${patch_path} failed to be uploaded to s3://${AWS_BUCKET_NAME}"
     66        return 1
     67    fi
     68    return 0
     69 }
     70 
     71 get_patch(){
     72    # $1 and $2 are the /path/to/filename
     73    if [ "$(stat -c "%s" "$2")" -lt ${CACHE_THRESHOLD} ]
     74    then
     75      return 1
     76    fi
     77    sha_from=$(getsha512 "$1")
     78    sha_to=$(getsha512 "$2")
     79    destination_file="$3"
     80    s3_filename="$(basename "$3")"
     81 
     82    # Try to retrieve from local cache first.
     83    if [ -n "$LOCAL_CACHE_DIR" ]; then
     84        if [ -r "$LOCAL_CACHE_DIR/$sha_from/$sha_to" ]; then
     85            cp -avf "$LOCAL_CACHE_DIR/$sha_from/$sha_to" "$destination_file"
     86            echo "Successful retrieved ${destination_file} from local cache."
     87            return 0
     88        fi
     89    fi
     90    # If not in the local cache, we might find it remotely.
     91 
     92    if [ -n "${AWS_BUCKET_NAME}" ]; then
     93        BUCKET_PATH="s3://${AWS_BUCKET_NAME}${sha_from}/${sha_to}/${s3_filename}"
     94        if aws s3 ls "${BUCKET_PATH}"; then
     95            ((S3_CACHE_HITS++))
     96            echo "s3 cache hit for ${s3_filename} (${S3_CACHE_HITS} total hits)"
     97            if aws s3 cp "${BUCKET_PATH}" "${destination_file}"; then
     98                echo "Successful retrieved ${destination_file} from s3://${AWS_BUCKET_NAME}"
     99                return 0
    100            else
    101                echo "Failed to retrieve ${destination_file} from s3://${AWS_BUCKET_NAME}"
    102                return 1
    103            fi
    104        # Not found, fall through to default error
    105        else
    106            ((S3_CACHE_MISSES++))
    107            echo "s3 cache miss for ${s3_filename} (${S3_CACHE_MISSES} total misses)"
    108        fi
    109    fi
    110    return 1
    111 }
    112 
# Reset getopts state explicitly in case the environment carried a stale OPTIND.
OPTIND=1

# Leading ':' in the optstring enables silent error reporting so the ':' and
# '\?' case arms below produce the error messages instead of getopts itself.
while getopts ":S:c:gu" option; do
   case $option in
       S)
           # This will probably be bucketname/path/prefix but we can use it either way
           AWS_BUCKET_NAME="$OPTARG"
           # Ensure trailing slash is there.
           if [[ ! $AWS_BUCKET_NAME =~ .*/$ ]]; then
             AWS_BUCKET_NAME="${AWS_BUCKET_NAME}/"
           fi
           ;;
       c)
           LOCAL_CACHE_DIR="$OPTARG"
           ;;
       g)
           # -g: pre hook — look up an existing patch in the cache.
           HOOK="PRE"
           ;;
       u)
           # -u: post hook — store the freshly generated patch in the cache.
           HOOK="POST"
           ;;
       \?)
           echo "Invalid option: -$OPTARG" >&2
           print_usage
           exit 1
           ;;
       :)
           echo "Option -$OPTARG requires an argument." >&2
           print_usage
           exit 1
           ;;
       *)
           # Defensive default; unreachable with the current optstring.
           echo "Unimplemented option: -$OPTARG" >&2
           print_usage
           exit 1
           ;;
   esac
done
shift $((OPTIND-1))

# Dispatch on the selected hook; remaining positional args are
# PATH-FROM-URL PATH-TO-URL PATH-PATCH (see print_usage). With neither -g nor
# -u given, the script is a no-op and exits 0.
if [ "$HOOK" == "PRE" ]; then
   get_patch "$1" "$2" "$3"
elif [ "$HOOK" == "POST" ]; then
   upload_patch "$1" "$2" "$3"
fi