diff --git a/libraries/compile-examples/Dockerfile b/libraries/compile-examples/Dockerfile
index 44a63e3d..ddbb443a 100644
--- a/libraries/compile-examples/Dockerfile
+++ b/libraries/compile-examples/Dockerfile
@@ -1,12 +1,15 @@
-# Container image that runs your code
-FROM ubuntu:latest
+FROM python:3.8.2
 
 # Install prerequisites
-RUN apt-get update --quiet=2 && apt-get install --quiet=2 --assume-yes wget
-CMD /bin/bash
+RUN apt-get update && apt-get install -y git wget jq curl \
+  && rm -rf /var/lib/apt/lists/*
 
 # Copies your code file from your action repository to the filesystem path `/` of the container
 COPY entrypoint.sh /entrypoint.sh
+COPY reportsizetrends /reportsizetrends
+
+# Install Python dependencies
+RUN pip install -r /reportsizetrends/requirements.txt
 
 # Code file to execute when the docker container starts up (`entrypoint.sh`)
 ENTRYPOINT ["/entrypoint.sh"]
diff --git a/libraries/compile-examples/README.md b/libraries/compile-examples/README.md
index 6a003c57..ef3729bd 100644
--- a/libraries/compile-examples/README.md
+++ b/libraries/compile-examples/README.md
@@ -20,10 +20,105 @@ For 3rd party boards, also specify the Boards Manager URL:
 
 List of library dependencies to install (space separated). Default `""`.
 
+### `github-token`
+
+GitHub access token used to get information from the GitHub API. Only needed if you're using the size report features with private repositories. The [`${{ secrets.GITHUB_TOKEN }}`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token) provided by GitHub Actions is convenient to use. Default `""`.
+
+### `size-report-sketch`
+
+Name of the sketch used to compare memory usage change. Default `""`.
+
+### `enable-size-deltas-report`
+
+Set to `true` to cause the action to determine the change in memory usage for the [`size-report-sketch`](#size-report-sketch) between the pull request branch and the tip of the pull request's base branch. This may be used with the [`arduino/actions/libraries/report-size-deltas` action](https://github.com/arduino/actions/tree/master/libraries/report-size-deltas). Default `false`.
+
+### `size-deltas-report-folder-name`
+
+Folder to save the JSON formatted memory usage change reports to. It should be used only to store the reports. It will be created under [`GITHUB_WORKSPACE`](https://help.github.com/en/actions/configuring-and-managing-workflows/using-environment-variables) if it doesn't already exist. Default `"size-deltas-reports"`.
+
+### `enable-size-trends-report`
+
+Set to `true` to cause the action to record the memory usage of [`size-report-sketch`](#size-report-sketch) to a Google Sheets spreadsheet on every push to the repository's default branch. Default `false`.
+
+### `keyfile`
+
+Contents of the Google key file used to update the size trends report Google Sheets spreadsheet. This should be defined using a [GitHub secret](https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets). Default `""`.
+
+To create the key file and give it access to your spreadsheet:
+1. Open https://console.developers.google.com/project
+1. Click the "Create Project" button.
+1. In the "Project name" field, enter the name you want for your project.
+1. You don't need to select anything from the "Location" menu.
+1. Click the button with the three horizontal lines at the top left corner of the window.
+1. Hover the mouse pointer over "APIs & Services".
+1. Click "Library".
+1. Make sure the name of the project you created is selected from the dropdown menu at the top of the window.
+1. Click "Google Sheets API".
+1. Click the "Enable" button.
+1. Click the "Create Credentials" button.
+1. From the "Which API are you using?" menu, select "Google Sheets API".
+1. From the "Where will you be calling the API from?" menu, select "Other non-UI".
+1. From the "What data will you be accessing?" options, select "Application data".
+1. From the "Are you planning to use this API with App Engine or Compute Engine?" options, select "No, I’m not using them".
+1. Click the "What credentials do I need?" button.
+1. In the "Service account name" field, enter the name you want to use for the service account.
+1. From the "Role" menu, select "Project > Editor".
+1. From the "Key type" options, select "JSON".
+1. Click the "Continue" button. The .json file containing your private key will be downloaded. Save this somewhere safe.
+1. Open the downloaded file.
+1. Copy the entire contents of the file to the clipboard.
+1. Open the GitHub page of the repository you are configuring the GitHub Actions workflow for.
+1. Click the "Settings" tab.
+1. From the menu on the left side of the window, click "Secrets".
+1. Click the "Add a new secret" link.
+1. In the "Name" field, enter the variable name you want to use for your secret. This will be used for the [`keyfile`](#keyfile) argument of the `compile-examples` action in your workflow configuration file. For example, if you named the secret `GOOGLE_KEY_FILE`, you would reference it in your workflow configuration as `${{ secrets.GOOGLE_KEY_FILE }}`.
+1. In the "Value" field, paste the contents of the key file.
+1. Click the "Add secret" button.
+1. Open the downloaded key file again.
+1. Copy the email address shown in the `client_email` field.
+1. Open Google Sheets: https://docs.google.com/spreadsheets
+1. Under "Start a new spreadsheet", click "Blank".
+1. Click the "Share" button at the top right corner of the window.
+1. If you haven't already, give your spreadsheet a name.
+1. Paste the `client_email` email address into the "Enter names or email addresses..." field.
+1. Uncheck the box next to "Notify people".
+1. Click the "OK" button.
+1. In the "Skip sending invitations?" dialog, click the "OK" button.
+
+### `size-trends-report-spreadsheet-id`
+
+The ID of the Google Sheets spreadsheet to write the memory usage trends data to. The URL of your spreadsheet will look something like `https://docs.google.com/spreadsheets/d/15WOp3vp-6AnTnWlNWaNWNl61Fe_j8UJhIKE0rVdV-7U/edit#gid=0`. In this example, the spreadsheet ID is `15WOp3vp-6AnTnWlNWaNWNl61Fe_j8UJhIKE0rVdV-7U`. Default `""`.
+
+### `size-trends-report-sheet-name`
+
+The sheet name in the Google Sheets spreadsheet used for the memory usage trends report. Default `"Sheet1"`.
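+
+## Size deltas report format
+
+When [`enable-size-deltas-report`](#enable-size-deltas-report) is enabled, one JSON report is written per board, named after the FQBN with the `:` separators replaced by `-` (e.g., `arduino-samd-mkrgsm1400.json`). When a size can't be determined for the board, `"N/A"` is reported. The values below are illustrative:
+
+```json
+{
+  "fqbn": "arduino:samd:mkrgsm1400",
+  "sketch": "examples/ConnectionHandlerDemo",
+  "previous_flash": 51636,
+  "flash": 51660,
+  "flash_delta": 24,
+  "previous_ram": 5104,
+  "ram": 5104,
+  "ram_delta": 0
+}
+```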
+
 ## Example usage
 
+Only compiling examples:
+```yaml
+- uses: arduino/actions/libraries/compile-examples@master
+  with:
+    fqbn: 'arduino:avr:uno'
+```
+
+Storing the memory usage change report as a [workflow artifact](https://help.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts):
+```yaml
+- uses: arduino/actions/libraries/compile-examples@master
+  with:
+    size-report-sketch: Foobar
+    enable-size-deltas-report: true
+- if: github.event_name == 'pull_request'
+  uses: actions/upload-artifact@v1
+  with:
+    name: size-deltas-reports
+    path: size-deltas-reports
+```
+
+Publishing memory usage trends data to a Google Sheets spreadsheet:
 ```yaml
-uses: arduino/actions/libraries/compile-examples@master
-with:
-  fqbn: 'arduino:avr:uno'
+- uses: arduino/actions/libraries/compile-examples@master
+  with:
+    size-report-sketch: Foobar
+    enable-size-trends-report: true
+    keyfile: ${{ secrets.GOOGLE_KEY_FILE }}
+    size-trends-report-spreadsheet-id: 15WOp3vp-6AnTnWlNWaNWNl61Fe_j8UJhIKE0rVdV-7U
 ```
diff --git a/libraries/compile-examples/action.yml b/libraries/compile-examples/action.yml
index 9fa40ffa..ae6609ed 100644
--- a/libraries/compile-examples/action.yml
+++ b/libraries/compile-examples/action.yml
@@ -10,6 +10,30 @@ inputs:
   libraries:
     description: 'List of library dependencies to install (space separated)'
     default: ''
+  github-token:
+    description: 'GitHub access token used to get information from the GitHub API. Only needed if you are using the size report features with private repositories.'
+    default: ''
+  size-report-sketch:
+    description: 'Name of the sketch used to compare memory usage change'
+    default: ''
+  enable-size-deltas-report:
+    description: 'Set to true to cause the action to determine the change in memory usage for the size-report-sketch'
+    default: false
+  size-deltas-report-folder-name:
+    description: 'Folder to save the memory usage change report to'
+    default: 'size-deltas-reports'
+  enable-size-trends-report:
+    description: 'Set to true to cause the action to record the memory usage of size-report-sketch'
+    default: false
+  keyfile:
+    description: 'Contents of the Google key file used to update the size trends report Google Sheets spreadsheet'
+    default: ''
+  size-trends-report-spreadsheet-id:
+    description: 'The ID of the Google Sheets spreadsheet to write the memory usage trends data to'
+    default: ''
+  size-trends-report-sheet-name:
+    description: 'The sheet name in the Google Sheets spreadsheet used for the memory usage trends report'
+    default: 'Sheet1'
 runs:
   using: 'docker'
   image: 'Dockerfile'
@@ -17,3 +41,10 @@ runs:
     - ${{ inputs.cli-version }}
     - ${{ inputs.fqbn }}
     - ${{ inputs.libraries }}
+    - ${{ inputs.github-token }}
+    - ${{ inputs.size-report-sketch }}
+    - ${{ inputs.enable-size-deltas-report }}
+    - ${{ inputs.size-deltas-report-folder-name }}
+    - ${{ inputs.enable-size-trends-report }}
+    - ${{ inputs.size-trends-report-spreadsheet-id }}
+    - ${{ inputs.size-trends-report-sheet-name }}
diff --git a/libraries/compile-examples/entrypoint.sh b/libraries/compile-examples/entrypoint.sh
index 0aaf783c..175a1020 100755
--- a/libraries/compile-examples/entrypoint.sh
+++ b/libraries/compile-examples/entrypoint.sh
@@ -1,8 +1,17 @@
 #!/bin/bash
 
-readonly CLI_VERSION="$1"
-readonly FQBN_ARG="$2"
-readonly LIBRARIES="$3"
+readonly CLI_VERSION="${1}"
+readonly FQBN_ARG="${2}"
+readonly LIBRARIES="${3}"
+readonly GH_TOKEN="${4}"
+readonly SIZE_REPORT_SKETCH="${5}"
+ENABLE_SIZE_DELTAS_REPORT="${6}"
+readonly SIZE_DELTAS_REPORT_FOLDER_NAME="${7}"
+ENABLE_SIZE_TRENDS_REPORT="${8}"
+readonly SIZE_TRENDS_REPORT_SPREADSHEET_ID="${9}"
+readonly SIZE_TRENDS_REPORT_SHEET_NAME="${10}"
+
+readonly SIZE_NOT_APPLICABLE_INDICATOR='"N/A"'
 
 # Determine cli archive
 readonly CLI_ARCHIVE="arduino-cli_${CLI_VERSION}_Linux_64bit.tar.gz"
@@ -16,6 +25,117 @@ readonly CORE="$(echo "$FQBN" | cut --delimiter=':' --fields=1,2)"
 
 # Additional Boards Manager URL
 readonly ADDITIONAL_URL="${FQBN_ARRAY[1]}"
 
+function compile_example() {
+  local -r examplePath="$1"
+  arduino-cli compile --verbose --warnings all --fqbn "$FQBN" "$examplePath" || {
+    return $?
+  }
+}
+
+# Provide a more meaningful indicator in the report when a size could not be determined
+function check_sizes() {
+  if [[ "$FLASH_SIZE" == "" ]]; then
+    FLASH_SIZE="$SIZE_NOT_APPLICABLE_INDICATOR"
+  fi
+  if [[ "$RAM_SIZE" == "" ]]; then
+    RAM_SIZE="$SIZE_NOT_APPLICABLE_INDICATOR"
+  fi
+}
+
+# Get the memory usage from the compilation output
+function compile_example_get_size_from_output() {
+  local -r examplePath="$1"
+
+  FLASH_SIZE=""
+  RAM_SIZE=""
+
+  local compilationOutput
+  compilationOutput=$(compile_example "$examplePath" 2>&1)
+  local -r compileExampleExitStatus=$?
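+  # The exit status was captured on the line above, immediately after the
+  # command substitution, before any other command could overwrite $?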
+  # Display the compilation output
+  echo "$compilationOutput"
+  if [[ $compileExampleExitStatus -ne 0 ]]; then
+    return $compileExampleExitStatus
+  fi
+
+  while read -r outputLine; do
+    # Determine program storage memory usage
+    programStorageRegex="Sketch uses ([0-9,]+) *"
+    if [[ "$outputLine" =~ $programStorageRegex ]]; then
+      FLASH_SIZE="${BASH_REMATCH[1]}"
+      # Remove commas
+      FLASH_SIZE="${FLASH_SIZE//,/}"
+    fi
+
+    # Determine dynamic memory usage
+    dynamicMemoryRegex="Global variables use ([0-9,]+) *"
+    if [[ "$outputLine" =~ $dynamicMemoryRegex ]]; then
+      RAM_SIZE="${BASH_REMATCH[1]}"
+      # Remove commas
+      RAM_SIZE="${RAM_SIZE//,/}"
+    fi
+  done <<<"$compilationOutput"
+
+  # Some platforms aren't configured to output RAM usage by global variables (e.g., Arduino SAM Boards), but the flash usage should at least be in the output
+  if [[ "$FLASH_SIZE" == "" && "$RAM_SIZE" == "" ]]; then
+    echo "::error::Something went wrong while determining memory usage of the size-report-sketch"
+    exit 1
+  fi
+}
+
+if [[ "$GITHUB_EVENT_NAME" != "pull_request" ]]; then
+  ENABLE_SIZE_DELTAS_REPORT='false'
+fi
+
+if [[ "$GITHUB_EVENT_NAME" != "push" ]]; then
+  ENABLE_SIZE_TRENDS_REPORT='false'
+fi
+
+# If either size report feature is enabled, the size-report-sketch argument must also be defined
+if [[ ("$ENABLE_SIZE_DELTAS_REPORT" == "true" || "$ENABLE_SIZE_TRENDS_REPORT" == "true") && "$SIZE_REPORT_SKETCH" == "" ]]; then
+  echo "::error::size-report-sketch argument was not defined"
+  exit 1
+fi
+
+# If the enable-size-deltas-report argument is set to true, the size-deltas-report-folder-name argument must also be defined
+if [[ "$ENABLE_SIZE_DELTAS_REPORT" == "true" && "$SIZE_DELTAS_REPORT_FOLDER_NAME" == "" ]]; then
+  echo "::error::size-deltas-report-folder-name argument was not defined"
+  exit 1
+fi
+
+# If the enable-size-trends-report argument is set to true, the keyfile argument (passed via the INPUT_KEYFILE environment variable) must also be defined
+if [[ "$ENABLE_SIZE_TRENDS_REPORT" == "true" && "$INPUT_KEYFILE" == "" ]]; then
+  echo "::error::keyfile argument was not defined"
+  exit 1
+fi
+
+# If the enable-size-trends-report argument is set to true, the size-trends-report-spreadsheet-id argument must also be defined
+if [[ "$ENABLE_SIZE_TRENDS_REPORT" == "true" && "$SIZE_TRENDS_REPORT_SPREADSHEET_ID" == "" ]]; then
+  echo "::error::size-trends-report-spreadsheet-id argument was not defined"
+  exit 1
+fi
+
+# Only publish size trends report on push to the default branch
+if [[ "$ENABLE_SIZE_TRENDS_REPORT" == "true" ]]; then
+  # Determine the current branch
+  CURRENT_BRANCH_NAME="${GITHUB_REF##*/}"
+
+  if [[ "$GH_TOKEN" == "" ]]; then
+    # Access token is not needed for public repositories
+    readonly DEFAULT_BRANCH_NAME="$(curl "https://api.github.com/repos/${GITHUB_REPOSITORY}" | jq --raw-output .default_branch)"
+  else
+    readonly DEFAULT_BRANCH_NAME="$(curl --header "Authorization: token ${GH_TOKEN}" "https://api.github.com/repos/${GITHUB_REPOSITORY}" | jq --raw-output .default_branch)"
+  fi
+  if [[ "$DEFAULT_BRANCH_NAME" == "null" ]]; then
+    echo "::error::Unable to determine default branch name. Please specify the github-token argument in your workflow configuration."
+    exit 1
+  fi
+
+  if [[ "$CURRENT_BRANCH_NAME" != "$DEFAULT_BRANCH_NAME" ]]; then
+    ENABLE_SIZE_TRENDS_REPORT='false'
+  fi
+fi
+
 # Download the arduino-cli
 wget --no-verbose --directory-prefix="$HOME" "https://downloads.arduino.cc/arduino-cli/$CLI_ARCHIVE" || {
   exit 1
@@ -68,9 +188,92 @@ fi
 SCRIPT_EXIT_STATUS=0
 for EXAMPLE in $EXAMPLES; do
   echo "Building example $EXAMPLE"
-  arduino-cli compile --verbose --warnings all --fqbn "$FQBN" "$EXAMPLE" || {
-    SCRIPT_EXIT_STATUS="$?"
-  }
+
+  if [[ ("$ENABLE_SIZE_DELTAS_REPORT" != "true" && "$ENABLE_SIZE_TRENDS_REPORT" != "true") || "${EXAMPLE##*/}" != "$SIZE_REPORT_SKETCH" ]]; then
+    # Compile without determining memory usage
+    compile_example "$EXAMPLE" || {
+      SCRIPT_EXIT_STATUS="$?"
+    }
+    continue
+  elif [[ ("$ENABLE_SIZE_DELTAS_REPORT" == "true" || "$ENABLE_SIZE_TRENDS_REPORT" == "true") && "${EXAMPLE##*/}" == "$SIZE_REPORT_SKETCH" ]]; then
+    # Compile and determine memory usage
+
+    # Determine memory usage of the sketch at the tip of the pull request branch
+    compile_example_get_size_from_output "$EXAMPLE" || {
+      SCRIPT_EXIT_STATUS="$?"
+      continue
+    }
+    check_sizes
+
+    readonly CURRENT_FLASH_SIZE="$FLASH_SIZE"
+    readonly CURRENT_RAM_SIZE="$RAM_SIZE"
+
+    if [[ "$ENABLE_SIZE_TRENDS_REPORT" == "true" ]]; then
+      readonly SHORT_COMMIT_HASH="$(git rev-parse --short HEAD)"
+      python3 /reportsizetrends/reportsizetrends.py --spreadsheet-id "$SIZE_TRENDS_REPORT_SPREADSHEET_ID" --sheet-name "$SIZE_TRENDS_REPORT_SHEET_NAME" --google-key-file "$INPUT_KEYFILE" --sketch-name="$EXAMPLE" --commit-hash="$SHORT_COMMIT_HASH" --commit-url="https://github.com/${GITHUB_REPOSITORY}/commit/${SHORT_COMMIT_HASH}" --fqbn="$FQBN" --flash="$CURRENT_FLASH_SIZE" --ram="$CURRENT_RAM_SIZE" || {
+        echo "::error::Could not update size trends report spreadsheet"
+        exit 1
+      }
+    fi
+
+    if [[ "$ENABLE_SIZE_DELTAS_REPORT" == "true" ]]; then
+      # Determine memory usage of the sketch at the tip of the pull request's base branch
+
+      # Save the commit hash for the tip of the pull request branch
+      readonly CURRENT_COMMIT="$(git rev-parse HEAD)"
+
+      # Checkout the tip of the pull request's base branch
+
+      # Determine the pull request number, to use for the GitHub API request
+      readonly PULL_REQUEST_NUMBER="$(jq --raw-output '.pull_request.number' "$GITHUB_EVENT_PATH")"
+      if [[ "$GH_TOKEN" == "" ]]; then
+        # Access token is not needed for public repositories
+        readonly BASE_BRANCH_NAME="$(curl "https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${PULL_REQUEST_NUMBER}" | jq --raw-output .base.ref)"
+      else
+        readonly BASE_BRANCH_NAME="$(curl --header "Authorization: token ${GH_TOKEN}" "https://api.github.com/repos/${GITHUB_REPOSITORY}/pulls/${PULL_REQUEST_NUMBER}" | jq --raw-output .base.ref)"
+      fi
+      if [[ "$BASE_BRANCH_NAME" == "null" ]]; then
+        echo "::error::Unable to determine base branch name. Please specify the github-token argument in your workflow configuration."
+        exit 1
+      fi
+      git checkout "$BASE_BRANCH_NAME" || {
+        echo "::error::Failed to checkout base branch"
+        exit 1
+      }
+
+      # Compile the example sketch and get the sizes
+      compile_example_get_size_from_output "$EXAMPLE"
+      check_sizes
+
+      if [[ "$CURRENT_FLASH_SIZE" == "$SIZE_NOT_APPLICABLE_INDICATOR" || "$FLASH_SIZE" == "$SIZE_NOT_APPLICABLE_INDICATOR" ]]; then
+        FLASH_DELTA="$SIZE_NOT_APPLICABLE_INDICATOR"
+      else
+        FLASH_DELTA="$((CURRENT_FLASH_SIZE - FLASH_SIZE))"
+      fi
+      echo "Change in flash memory usage: $FLASH_DELTA"
+      if [[ "$CURRENT_RAM_SIZE" == "$SIZE_NOT_APPLICABLE_INDICATOR" || "$RAM_SIZE" == "$SIZE_NOT_APPLICABLE_INDICATOR" ]]; then
+        RAM_DELTA="$SIZE_NOT_APPLICABLE_INDICATOR"
+      else
+        RAM_DELTA="$((CURRENT_RAM_SIZE - RAM_SIZE))"
+      fi
+      echo "Change in RAM used by globals: $RAM_DELTA"
+
+      # Create the report folder
+      readonly SIZE_REPORT_FOLDER_PATH="${GITHUB_WORKSPACE}/${SIZE_DELTAS_REPORT_FOLDER_NAME}"
+      if ! [[ -d "$SIZE_REPORT_FOLDER_PATH" ]]; then
+        mkdir --parents "$SIZE_REPORT_FOLDER_PATH"
+      fi
+      # Create the report file
+      readonly SIZE_REPORT_FILE_PATH="${SIZE_REPORT_FOLDER_PATH}/${FQBN//:/-}.json"
+      echo "{\"fqbn\": \"${FQBN}\", \"sketch\": \"${EXAMPLE}\", \"previous_flash\": ${FLASH_SIZE}, \"flash\": ${CURRENT_FLASH_SIZE}, \"flash_delta\": ${FLASH_DELTA}, \"previous_ram\": ${RAM_SIZE}, \"ram\": ${CURRENT_RAM_SIZE}, \"ram_delta\": ${RAM_DELTA}}" | jq . >"$SIZE_REPORT_FILE_PATH"
+
+      # Switch back to the commit at the tip of the pull request branch
+      git checkout "$CURRENT_COMMIT" || {
+        echo "::error::Could not checkout the pull request's head commit"
+        exit 1
+      }
+    fi
+  fi
 done
 
 exit $SCRIPT_EXIT_STATUS
diff --git a/libraries/compile-examples/reportsizetrends/reportsizetrends.py b/libraries/compile-examples/reportsizetrends/reportsizetrends.py
new file mode 100644
index 00000000..568d0254
--- /dev/null
+++ b/libraries/compile-examples/reportsizetrends/reportsizetrends.py
@@ -0,0 +1,297 @@
+import logging
+import argparse
+import json
+import datetime
+from google.oauth2 import service_account
+from googleapiclient import discovery
+
+logging.basicConfig(level=logging.CRITICAL)
+logger = logging.getLogger(__name__)
+
+
+def main(argument):
+    set_verbosity(enable_verbosity=argument.enable_verbosity)
+
+    report_size_trends = ReportSizeTrends(google_key_file=argument.google_key_file,
+                                          spreadsheet_id=argument.spreadsheet_id,
+                                          sheet_name=argument.sheet_name,
+                                          sketch_name=argument.sketch_name,
+                                          commit_hash=argument.commit_hash,
+                                          commit_url=argument.commit_url,
+                                          fqbn=argument.fqbn,
+                                          flash=argument.flash,
+                                          ram=argument.ram)
+
+    report_size_trends.report_size_trends()
+
+
+def set_verbosity(enable_verbosity):
+    """Turn debug output on or off.
+ + Keyword arguments: + enable_verbosity -- this will generally be controlled via the script's --verbose command line argument + (True, False) + """ + # DEBUG: automatically generated output and all higher log level output + # INFO: manually specified output and all higher log level output + verbose_logging_level = logging.DEBUG + + if type(enable_verbosity) is not bool: + raise TypeError + if enable_verbosity: + logger.setLevel(level=verbose_logging_level) + else: + logger.setLevel(level=logging.WARNING) + + +class ReportSizeTrends: + """Methods for reporting memory usage to a Google Sheets spreadsheet + + Keyword arguments: + google_key_file -- Google key file that gives write access to the Google Sheets API + spreadsheet_id -- ID of the spreadsheet + sheet_name -- name of the spreadsheet's sheet to use for the report + sketch_name -- name of the example sketch the memory usage data comes from + commit_hash -- SHA of the commit the data applies to + commit_url -- GitHub URL for the commit + fqbn -- FQBN of the board the data is for + flash -- flash memory usage of the sketch when compiled for the FQBN + ram -- dynamic memory used by globals of the sketch when compiled for the FQBN + """ + heading_row_number = "1" + timestamp_column_letter = "A" + timestamp_column_heading = "Commit Timestamp" + sketch_name_column_letter = "B" + sketch_name_column_heading = "Sketch Name" + commit_hash_column_letter = "C" + commit_hash_column_heading = "Commit Hash" + shared_data_first_column_letter = timestamp_column_letter + shared_data_last_column_letter = commit_hash_column_letter + shared_data_columns_headings_data = ( + "[[\"" + timestamp_column_heading + "\",\"" + sketch_name_column_heading + "\",\"" + + commit_hash_column_heading + "\"]]") + + # These are appended to the FQBN as the size data column headings + flash_heading_indicator = " flash" + ram_heading_indicator = " RAM" + + def __init__(self, google_key_file, spreadsheet_id, sheet_name, sketch_name, commit_hash, commit_url, fqbn, flash, + ram): + self.google_key_file = google_key_file + self.sheet_name = sheet_name + self.spreadsheet_id = spreadsheet_id + self.fqbn = fqbn + self.commit_hash = commit_hash + self.sketch_name = sketch_name + self.commit_url = commit_url + self.flash = flash + self.ram = ram + + def report_size_trends(self): + """Add memory usage data to a Google Sheets spreadsheet""" + self.service = self.get_service(google_key_file=self.google_key_file) + + heading_row_data = self.get_heading_row_data() + + if ("values" in heading_row_data) is False: + # Fresh sheet, so fill in the shared data headings + logger.info("Initializing empty sheet") + self.populate_shared_data_headings() + + # Get the heading row data again in case it changed + heading_row_data = self.get_heading_row_data() + + data_column_letters = self.get_data_column_letters(heading_row_data=heading_row_data) + + if not data_column_letters["populated"]: + # Columns don't exist for this board yet, so create them + self.populate_data_column_headings(flash_column_letter=data_column_letters["flash"], + ram_column_letter=data_column_letters["ram"]) + + current_row = self.get_current_row() + + if not current_row["populated"]: + # A row doesn't exist for this commit yet, so create one + self.create_row(row_number=current_row["number"]) + + self.write_memory_usage_data(flash_column_letter=data_column_letters["flash"], + ram_column_letter=data_column_letters["ram"], + row_number=current_row["number"], + flash=self.flash, + ram=self.ram) + + def get_service(self, 
google_key_file): + """Return the Google API service object + + Keyword arguments: + google_key_file -- contents of the Google private key file + """ + credentials = service_account.Credentials.from_service_account_info( + json.loads(google_key_file, strict=False), scopes=['https://www.googleapis.com/auth/spreadsheets']) + return discovery.build('sheets', 'v4', credentials=credentials) + + def get_heading_row_data(self): + """Return the contents of the heading row""" + spreadsheet_range = self.sheet_name + "!" + self.heading_row_number + ":" + self.heading_row_number + request = self.service.spreadsheets().values().get(spreadsheetId=self.spreadsheet_id, range=spreadsheet_range) + response = request.execute() + logger.debug("heading_row_data: ") + logger.debug(response) + return response + + def populate_shared_data_headings(self): + """Add the headings to the shared data columns (timestamp, sketch name, commit)""" + spreadsheet_range = ( + self.sheet_name + "!" + self.shared_data_first_column_letter + self.heading_row_number + ":" + + self.shared_data_last_column_letter + self.heading_row_number) + request = self.service.spreadsheets().values().update(spreadsheetId=self.spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads( + self.shared_data_columns_headings_data)}) + response = request.execute() + logger.debug(response) + + def get_data_column_letters(self, heading_row_data): + """Return a dictionary containing the data column numbers for the board + populated -- whether the column headings have been added + flash -- letter of the column containing flash usage data + ram -- letter of the column containing ram usage data + + Keyword arguments: + heading_row_data -- the contents of the heading row of the spreadsheet, as returned by get_heading_row_data() + """ + populated = False + index = 0 + for index, cell_text in enumerate(heading_row_data["values"][0]): + if cell_text == self.fqbn + self.flash_heading_indicator: + populated = True + break + + if not populated: + # Use the next columns + index += 1 + + board_data_flash_column_letter = chr(index + 65) + board_data_ram_column_letter = chr(index + 1 + 65) + logger.info("Flash data column: " + board_data_flash_column_letter) + logger.info("RAM data column: " + board_data_ram_column_letter) + return {"populated": populated, "flash": board_data_flash_column_letter, "ram": board_data_ram_column_letter} + + def populate_data_column_headings(self, flash_column_letter, ram_column_letter): + """Add the headings to the data columns for this FQBN + + Keyword arguments: + flash_column_letter -- letter of the column that contains the flash usage data + ram_column_letter -- letter of the column that contains the dynamic memory used by globals data + """ + logger.info("No data columns found for " + self.fqbn + ". Adding column headings at columns " + + flash_column_letter + " and " + ram_column_letter) + spreadsheet_range = (self.sheet_name + "!" 
+ flash_column_letter + self.heading_row_number + ":" + + ram_column_letter + self.heading_row_number) + board_data_headings_data = ("[[\"" + self.fqbn + self.flash_heading_indicator + "\",\"" + self.fqbn + + self.ram_heading_indicator + "\"]]") + request = self.service.spreadsheets().values().update(spreadsheetId=self.spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads(board_data_headings_data)}) + response = request.execute() + logger.debug(response) + + def get_current_row(self): + """Return a dictionary for the current row: + populated -- whether the shared data has already been added to the row + number -- the row number + """ + spreadsheet_range = (self.sheet_name + "!" + self.commit_hash_column_letter + ":" + + self.commit_hash_column_letter) + request = self.service.spreadsheets().values().get(spreadsheetId=self.spreadsheet_id, + range=spreadsheet_range) + commit_hash_column_data = request.execute() + logger.debug(commit_hash_column_data) + + populated = False + index = 0 + for index, cell_text in enumerate(commit_hash_column_data["values"], start=1): + if cell_text[0] == self.commit_hash: + populated = True + break + + if not populated: + index += 1 + + logger.info("Current row number: " + str(index)) + return {"populated": populated, "number": index} + + def create_row(self, row_number): + """Add the shared data to the row + + Keyword arguments: + row_number -- row number + """ + logger.info("No row found for the commit hash: " + self.commit_hash + ". Creating a new row #" + + str(row_number)) + spreadsheet_range = (self.sheet_name + "!" + self.shared_data_first_column_letter + str(row_number) + + ":" + self.shared_data_last_column_letter + str(row_number)) + shared_data_columns_data = ("[[\"" + "{:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()) + "\",\"" + + self.sketch_name + "\",\"=HYPERLINK(\\\"" + self.commit_url + "\\\",T(\\\"" + + self.commit_hash + "\\\"))\"]]") + request = self.service.spreadsheets().values().update(spreadsheetId=self.spreadsheet_id, + range=spreadsheet_range, + valueInputOption="USER_ENTERED", + body={"values": json.loads(shared_data_columns_data)}) + response = request.execute() + logger.debug(response) + + def write_memory_usage_data(self, flash_column_letter, ram_column_letter, row_number, flash, ram): + """Write the memory usage data for the board to the spreadsheet + + Keyword arguments: + flash_column_letter -- letter of the column containing flash memory usage data for the board + ram_column_letter -- letter of the column containing dynamic memory used for global variables for the board + row_number -- number of the row to write to + flash -- flash usage + ram -- dynamic memory used for global variables + """ + spreadsheet_range = (self.sheet_name + "!" 
+ flash_column_letter + str(row_number) + ":" + + ram_column_letter + str(row_number)) + size_data = "[[" + flash + "," + ram + "]]" + request = self.service.spreadsheets().values().update(spreadsheetId=self.spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads(size_data)}) + response = request.execute() + logger.debug(response) + + +# Only execute the following code if the script is run directly, not imported +if __name__ == '__main__': + # Parse command line arguments + argument_parser = argparse.ArgumentParser() + argument_parser.add_argument("--google-key-file", dest="google_key_file", + help="Contents of the Google authentication key file") + argument_parser.add_argument("--spreadsheet-id", dest="spreadsheet_id", + help="ID of the Google Sheets spreadsheet to edit") + argument_parser.add_argument("--sheet-name", dest="sheet_name", + help="Sheet name of the Google Sheets spreadsheet to edit") + argument_parser.add_argument("--sketch-name", dest="sketch_name", + help="Name of the sketch the size data is from") + argument_parser.add_argument("--commit-hash", dest="commit_hash", + help="Commit hash the size data is for") + argument_parser.add_argument("--commit-url", dest="commit_url", + help="URL of the commit") + argument_parser.add_argument("--fqbn", dest="fqbn", + help="FQBN of the board being compiled for") + argument_parser.add_argument("--flash", dest="flash", + help="Flash usage of the sketch") + argument_parser.add_argument("--ram", dest="ram", + help="RAM usage for global variables of the sketch") + argument_parser.add_argument("--verbose", dest="enable_verbosity", help="Enable verbose output", + action="store_true") + + # Run program + main(argument_parser.parse_args()) diff --git a/libraries/compile-examples/reportsizetrends/requirements.txt b/libraries/compile-examples/reportsizetrends/requirements.txt new file mode 100644 index 00000000..b00bff59 --- /dev/null +++ b/libraries/compile-examples/reportsizetrends/requirements.txt @@ -0,0 +1,2 @@ +google-auth~=1.11 +google-api-python-client~=1.7 diff --git a/libraries/compile-examples/reportsizetrends/tests/test_reportsizetrends.py b/libraries/compile-examples/reportsizetrends/tests/test_reportsizetrends.py new file mode 100644 index 00000000..5b8a4328 --- /dev/null +++ b/libraries/compile-examples/reportsizetrends/tests/test_reportsizetrends.py @@ -0,0 +1,308 @@ +import unittest.mock +from reportsizetrends import * + + +# Stub +class Service: + x = 0 + + def spreadsheets(self): + self.x = 42 + return Service() + + def values(self): + self.x = 42 + return Service() + + +# noinspection PyUnresolvedReferences +class TestReportsizetrends(unittest.TestCase): + set_verbosity(enable_verbosity=False) + + # @unittest.skip("") + def test_set_verbosity(self): + with self.assertRaises(TypeError): + set_verbosity(enable_verbosity=2) + set_verbosity(enable_verbosity=True) + set_verbosity(enable_verbosity=False) + + # @unittest.skip("") + def test_report_size_trends(self): + google_key_file = "test_google_key_file" + flash = 42 + ram = 11 + heading_row_data = {} + current_row = {"populated": False, "number": 42} + data_column_letters = {"populated": False, "flash": "A", "ram": "B"} + report_size_trends = ReportSizeTrends(google_key_file=google_key_file, + spreadsheet_id="foo", + sheet_name="foo", + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn="foo", + flash=flash, + ram=ram) + + report_size_trends.get_service = unittest.mock.MagicMock() + report_size_trends.get_heading_row_data 
= unittest.mock.MagicMock(return_value=heading_row_data) + report_size_trends.populate_shared_data_headings = unittest.mock.MagicMock() + report_size_trends.get_data_column_letters = unittest.mock.MagicMock(return_value=data_column_letters) + report_size_trends.populate_data_column_headings = unittest.mock.MagicMock() + report_size_trends.get_current_row = unittest.mock.MagicMock(return_value=current_row) + report_size_trends.create_row = unittest.mock.MagicMock() + report_size_trends.write_memory_usage_data = unittest.mock.MagicMock() + + # Test unpopulated shared data headings + report_size_trends.report_size_trends() + + report_size_trends.get_service.assert_called_once_with(google_key_file=google_key_file) + report_size_trends.get_heading_row_data.assert_has_calls([unittest.mock.call(), unittest.mock.call()]) + report_size_trends.populate_shared_data_headings.assert_called_once() + report_size_trends.get_data_column_letters.assert_called_once_with(heading_row_data=heading_row_data) + report_size_trends.populate_data_column_headings.assert_called_once_with( + flash_column_letter=data_column_letters["flash"], + ram_column_letter=data_column_letters["ram"] + ) + report_size_trends.get_current_row.assert_called_once() + report_size_trends.create_row.assert_called_once_with(row_number=current_row["number"]) + report_size_trends.write_memory_usage_data.assert_called_once_with(flash_column_letter=data_column_letters["flash"], + ram_column_letter=data_column_letters["ram"], + row_number=current_row["number"], + flash=flash, + ram=ram) + + # Test populated shared data headings + heading_row_data = {"values": "foo"} + report_size_trends.get_heading_row_data = unittest.mock.MagicMock(return_value=heading_row_data) + report_size_trends.populate_shared_data_headings.reset_mock() + report_size_trends.report_size_trends() + report_size_trends.populate_shared_data_headings.assert_not_called() + + # Test pre-populated data column headings + data_column_letters["populated"] = True + report_size_trends.get_data_column_letters = unittest.mock.MagicMock(return_value=data_column_letters) + report_size_trends.populate_data_column_headings.reset_mock() + report_size_trends.report_size_trends() + report_size_trends.populate_data_column_headings.assert_not_called() + + # Test pre-populated row + current_row["populated"] = True + report_size_trends.get_current_row = unittest.mock.MagicMock(return_value=current_row) + report_size_trends.create_row.reset_mock() + report_size_trends.report_size_trends() + report_size_trends.create_row.assert_not_called() + + # @unittest.skip("") + def test_get_heading_row_data(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn="foo", + flash=42, + ram=11) + heading_row_data = "test_heading_row_data" + + Service.get = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock(return_value=heading_row_data) + report_size_trends.service = Service() + + self.assertEqual(heading_row_data, report_size_trends.get_heading_row_data()) + spreadsheet_range = (sheet_name + "!" 
+ report_size_trends.heading_row_number + ":" + + report_size_trends.heading_row_number) + Service.get.assert_called_once_with(spreadsheetId=spreadsheet_id, range=spreadsheet_range) + Service.execute.assert_called_once() + + # @unittest.skip("") + def test_populate_shared_data_headings(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn="foo", + flash=42, + ram=11) + + Service.update = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock() + report_size_trends.service = Service() + + report_size_trends.populate_shared_data_headings() + spreadsheet_range = ( + sheet_name + "!" + report_size_trends.shared_data_first_column_letter + + report_size_trends.heading_row_number + ":" + report_size_trends.shared_data_last_column_letter + + report_size_trends.heading_row_number + ) + Service.update.assert_called_once_with( + spreadsheetId=spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads( + report_size_trends.shared_data_columns_headings_data)} + ) + Service.execute.assert_called_once() + + # @unittest.skip("") + def test_get_data_column_letters(self): + fqbn = "test_fqbn" + + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id="foo", + sheet_name="foo", + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn=fqbn, + flash=42, + ram=11) + heading_row_data = {"values": [["foo", "bar"]]} + column_letters = report_size_trends.get_data_column_letters(heading_row_data) + self.assertEqual(False, column_letters["populated"]) + self.assertEqual("C", column_letters["flash"]) + self.assertEqual("D", column_letters["ram"]) + + heading_row_data = {"values": [["foo", report_size_trends.fqbn + report_size_trends.flash_heading_indicator]]} + column_letters = report_size_trends.get_data_column_letters(heading_row_data) + self.assertEqual(True, column_letters["populated"]) + self.assertEqual("B", column_letters["flash"]) + self.assertEqual("C", column_letters["ram"]) + + # @unittest.skip("") + def test_populate_data_column_headings(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + fqbn = "test_fqbn" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn=fqbn, + flash=42, + ram=11) + flash_column_letter = "A" + ram_column_letter = "B" + + Service.update = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock() + report_size_trends.service = Service() + + report_size_trends.populate_data_column_headings(flash_column_letter=flash_column_letter, + ram_column_letter=ram_column_letter) + spreadsheet_range = (sheet_name + "!" 
+ flash_column_letter + report_size_trends.heading_row_number + ":" + + ram_column_letter + report_size_trends.heading_row_number) + board_data_headings_data = ("[[\"" + fqbn + report_size_trends.flash_heading_indicator + "\",\"" + fqbn + + report_size_trends.ram_heading_indicator + "\"]]") + Service.update.assert_called_once_with(spreadsheetId=spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads(board_data_headings_data)}) + Service.execute.assert_called_once() + + # @unittest.skip("") + def test_get_current_row(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + commit_hash = "test_commit_hash" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name="foo", + commit_hash=commit_hash, + commit_url="foo", + fqbn="foo", + flash=42, + ram=11) + Service.get = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock(return_value={"values": [["foo"], [commit_hash]]}) + report_size_trends.service = Service() + + self.assertEqual({"populated": True, "number": 2}, report_size_trends.get_current_row()) + spreadsheet_range = (sheet_name + "!" + report_size_trends.commit_hash_column_letter + ":" + + report_size_trends.commit_hash_column_letter) + Service.get.assert_called_once_with(spreadsheetId=spreadsheet_id, range=spreadsheet_range) + Service.execute.assert_called_once() + Service.execute = unittest.mock.MagicMock(return_value={"values": [["foo"], ["bar"]]}) + self.assertEqual({"populated": False, "number": 3}, report_size_trends.get_current_row()) + + # @unittest.skip("") + def test_create_row(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + sketch_name = "test_sketch_name" + fqbn = "test_fqbn" + commit_url = "test_commit_url" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name=sketch_name, + commit_hash="foo", + commit_url=commit_url, + fqbn=fqbn, + flash=42, + ram=11) + row_number = 42 + + Service.update = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock() + report_size_trends.service = Service() + + report_size_trends.create_row(row_number=row_number) + spreadsheet_range = (sheet_name + "!" 
+ report_size_trends.shared_data_first_column_letter + str(row_number) + + ":" + report_size_trends.shared_data_last_column_letter + str(row_number)) + shared_data_columns_data = ("[[\"" + '{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now()) + "\",\"" + + sketch_name + "\",\"=HYPERLINK(\\\"" + report_size_trends.commit_url + + "\\\",T(\\\"" + report_size_trends.commit_hash + "\\\"))\"]]") + Service.update.assert_called_once_with(spreadsheetId=spreadsheet_id, + range=spreadsheet_range, + valueInputOption="USER_ENTERED", + body={"values": json.loads(shared_data_columns_data)}) + Service.execute.assert_called_once() + + # @unittest.skip("") + def test_write_memory_usage_data(self): + spreadsheet_id = "test_spreadsheet_id" + sheet_name = "test_sheet_name" + report_size_trends = ReportSizeTrends(google_key_file="foo", + spreadsheet_id=spreadsheet_id, + sheet_name=sheet_name, + sketch_name="foo", + commit_hash="foo", + commit_url="foo", + fqbn="foo", + flash=42, + ram=11) + flash_column_letter = "A" + ram_column_letter = "B" + row_number = 42 + flash = "11" + ram = "12" + + Service.update = unittest.mock.MagicMock(return_value=Service()) + Service.execute = unittest.mock.MagicMock() + report_size_trends.service = Service() + + report_size_trends.write_memory_usage_data(flash_column_letter=flash_column_letter, ram_column_letter=ram_column_letter, + row_number=row_number, flash=flash, ram=ram) + spreadsheet_range = (sheet_name + "!" + flash_column_letter + str(row_number) + ":" + + ram_column_letter + str(row_number)) + size_data = "[[" + flash + "," + ram + "]]" + Service.update.assert_called_once_with(spreadsheetId=spreadsheet_id, + range=spreadsheet_range, + valueInputOption="RAW", + body={"values": json.loads(size_data)}) + Service.execute.assert_called_once() + + +if __name__ == '__main__': + unittest.main() diff --git a/libraries/report-size-deltas/Dockerfile b/libraries/report-size-deltas/Dockerfile new file mode 100644 index 00000000..5631cce4 --- /dev/null +++ b/libraries/report-size-deltas/Dockerfile @@ -0,0 +1,8 @@ +FROM python:3.8.2 + +# Copies your code file from your action repository to the filesystem path `/` of the container +COPY reportsizedeltas.py /reportsizedeltas.py +RUN ["chmod", "+x", "reportsizedeltas.py"] + +# Code file to execute when the docker container starts up +ENTRYPOINT ["python", "/reportsizedeltas.py"] diff --git a/libraries/report-size-deltas/README.md b/libraries/report-size-deltas/README.md new file mode 100644 index 00000000..c3516960 --- /dev/null +++ b/libraries/report-size-deltas/README.md @@ -0,0 +1,26 @@ +# libraries/report-size-deltas action + +This action comments on the pull request with a report on the change in memory usage of an example sketch. This should be run from a [scheduled workflow](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#onschedule). + +## Inputs + +### `size-deltas-reports-artifact-name` + +Name of the workflow artifact that contains the memory usage data, as specified to the actions/upload-artifact action via the name argument + +### `github-token` + +GitHub access token used to comment the memory usage comparison results to the PR thread. Default [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token). 
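+
+## Report comment format
+
+The report is posted as a single comment on the pull request thread: a heading that links to the head commit the report applies to, followed by a Markdown table with one row per FQBN. A decrease in memory usage is marked with :green_heart: and an increase with :small_red_triangle:. The commit hash, repository, and values below are illustrative:
+
+```
+**Memory usage change @[30ae5bbc1c7861b9d6dad1b797fd2b3185c4d398](https://github.com/octocat/Hello-World/pull/42/commits/30ae5bbc1c7861b9d6dad1b797fd2b3185c4d398)**
+
+FQBN | Flash Usage | RAM For Global Variables
+---|---|---
+adafruit:samd:adafruit_feather_m0 | :green_heart: -16 | N/A
+arduino:samd:mkrgsm1400 | :small_red_triangle: +24 | 0
+```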
+
+## Example usage
+
+```yaml
+on:
+  schedule:
+    - cron: '*/5 * * * *'
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: arduino/actions/libraries/report-size-deltas@master
+```
diff --git a/libraries/report-size-deltas/action.yml b/libraries/report-size-deltas/action.yml
new file mode 100644
index 00000000..613ca473
--- /dev/null
+++ b/libraries/report-size-deltas/action.yml
@@ -0,0 +1,12 @@
+name: 'Arduino Libraries - Report Size Deltas'
+description: 'Comments on the pull request with a report on the change in memory usage of an example sketch'
+inputs:
+  size-deltas-reports-artifact-name:
+    description: 'Name of the workflow artifact that contains the memory usage data, as specified to the actions/upload-artifact action via the name argument'
+    default: 'size-deltas-reports'
+  github-token:
+    description: 'GitHub access token used to comment the memory usage comparison results on the pull request thread'
+    default: ${{ github.token }}
+runs:
+  using: 'docker'
+  image: 'Dockerfile'
diff --git a/libraries/report-size-deltas/reportsizedeltas.py b/libraries/report-size-deltas/reportsizedeltas.py
new file mode 100644
index 00000000..7d7460e9
--- /dev/null
+++ b/libraries/report-size-deltas/reportsizedeltas.py
@@ -0,0 +1,470 @@
+import json
+import logging
+import os
+import re
+import sys
+import tempfile
+import time
+import urllib.error
+import urllib.parse
+import urllib.request
+import zipfile
+
+logging.basicConfig(level=logging.WARNING)
+logger = logging.getLogger(__name__)
+
+
+def main():
+    set_verbosity(enable_verbosity=False)
+
+    report_size_deltas = ReportSizeDeltas(repository_name=os.environ["GITHUB_REPOSITORY"],
+                                          artifact_name=os.environ["INPUT_SIZE-DELTAS-REPORTS-ARTIFACT-NAME"],
+                                          token=os.environ["INPUT_GITHUB-TOKEN"])
+
+    report_size_deltas.report_size_deltas()
+
+
+def set_verbosity(enable_verbosity):
+    """Turn debug output on or off.
+ + Keyword arguments: + enable_verbosity -- this will generally be controlled via the script's --verbose command line argument + (True, False) + """ + # DEBUG: automatically generated output and all higher log level output + # INFO: manually specified output and all higher log level output + verbose_logging_level = logging.DEBUG + + if type(enable_verbosity) is not bool: + raise TypeError + if enable_verbosity: + logger.setLevel(level=verbose_logging_level) + else: + logger.setLevel(level=logging.WARNING) + + +class ReportSizeDeltas: + """Methods for creating and submitting the memory usage change reports + + Keyword arguments: + repository_name -- repository owner and name e.g., octocat/Hello-World + artifact_name -- name of the workflow artifact that contains the memory usage data + token -- GitHub access token + """ + report_key_beginning = "**Memory usage change @[" + + def __init__(self, repository_name, artifact_name, token): + self.repository_name = repository_name + self.artifact_name = artifact_name + self.token = token + + def report_size_deltas(self): + """Scan the repository's pull requests to see if any need reports and return a list of the reports submitted""" + # Get the repository's pull requests + logger.debug("Getting PRs for " + self.repository_name) + report_list = [] + page_number = 1 + page_count = 1 + while page_number <= page_count: + api_data = self.api_request(request="repos/" + self.repository_name + "/pulls", + page_number=page_number) + prs_data = api_data["json_data"] + for pr_data in prs_data: + # Note: closed PRs are not listed in the API response + pr_number = pr_data["number"] + pr_head_sha = pr_data["head"]["sha"] + logger.debug("Processing pull request #" + str(pr_number) + ", head SHA: " + pr_head_sha) + # When a PR is locked, only collaborators may comment. The automatically generated GITHUB_TOKEN will + # likely be used, which is owned by the github-actions bot, who doesn't have collaborator status. So + # locking the thread would cause the job to fail. 
+ if pr_data["locked"]: + logger.debug("PR locked, skipping") + continue + + if self.report_exists(pr_number=pr_number, + pr_head_sha=pr_head_sha): + # Go on to the next PR + logger.debug("Report already exists") + continue + + artifact_download_url = self.get_artifact_download_url_for_sha(pr_user_login=pr_data["user"]["login"], + pr_head_ref=pr_data["head"]["ref"], + pr_head_sha=pr_head_sha) + if artifact_download_url is None: + # Go on to the next PR + logger.debug("No artifact found") + continue + + artifact_folder_object = self.get_artifact(artifact_download_url=artifact_download_url) + + report = self.generate_report(artifact_folder_object=artifact_folder_object, + pr_head_sha=pr_head_sha, + pr_number=pr_number) + + self.comment_report(pr_number=pr_number, report_markdown=report["markdown"]) + + report_list = report_list + [{"pr_number": pr_number, "report": report["data"]}] + + page_number += 1 + page_count = api_data["page_count"] + + return report_list + + def report_exists(self, pr_number, pr_head_sha): + """Return whether a report has already been commented to the pull request thread for the latest workflow run + + Keyword arguments: + pr_number -- number of the pull request to check + pr_head_sha -- PR's head branch hash + """ + # Get the pull request's comments + page_number = 1 + page_count = 1 + while page_number <= page_count: + api_data = self.api_request(request="repos/" + self.repository_name + "/issues/" + str(pr_number) + + "/comments", + page_number=page_number) + + comments_data = api_data["json_data"] + for comment_data in comments_data: + # Check if the comment is a report for the PR's head SHA + if comment_data["body"].startswith(self.report_key_beginning + pr_head_sha): + return True + + page_number += 1 + page_count = api_data["page_count"] + + # No reports found for the PR's head SHA + return False + + def get_artifact_download_url_for_sha(self, pr_user_login, pr_head_ref, pr_head_sha): + """Return the report artifact download URL associated with the given head commit hash + + Keyword arguments: + pr_user_login -- user name of the PR author (used to reduce number of GitHub API requests) + pr_head_ref -- name of the PR head branch (used to reduce number of GitHub API requests) + pr_head_sha -- hash of the head commit in the PR branch + """ + # Get the repository's workflow runs + page_number = 1 + page_count = 1 + while page_number <= page_count: + api_data = self.api_request(request="repos/" + self.repository_name + "/actions/runs", + request_parameters="actor=" + pr_user_login + "&branch=" + pr_head_ref + + "&event=pull_request&status=completed", + page_number=page_number) + runs_data = api_data["json_data"] + + # Find the runs with the head SHA of the PR (there may be multiple runs) + for run_data in runs_data["workflow_runs"]: + if run_data["head_sha"] == pr_head_sha: + # Check if this run has the artifact we're looking for + artifact_download_url = self.get_artifact_download_url_for_run(run_id=run_data["id"]) + if artifact_download_url is not None: + return artifact_download_url + + page_number += 1 + page_count = api_data["page_count"] + + # No matching artifact found + return None + + def get_artifact_download_url_for_run(self, run_id): + """Return the report artifact download URL associated with the given GitHub Actions workflow run + + Keyword arguments: + run_id -- GitHub Actions workflow run ID + """ + # Get the workflow run's artifacts + page_number = 1 + page_count = 1 + while page_number <= page_count: + api_data = self.api_request(request="repos/" 
+ self.repository_name + "/actions/runs/" + + str(run_id) + "/artifacts", + page_number=page_number) + artifacts_data = api_data["json_data"] + + for artifact_data in artifacts_data["artifacts"]: + # The artifact is identified by a specific name + if artifact_data["name"] == self.artifact_name: + return artifact_data["archive_download_url"] + + page_number += 1 + page_count = api_data["page_count"] + + # No matching artifact found + return None + + def get_artifact(self, artifact_download_url): + """Download and unzip the artifact and return an object for the temporary directory containing it + + Keyword arguments: + artifact_download_url -- URL to download the artifact from GitHub + """ + # Create temporary folder + artifact_folder_object = tempfile.TemporaryDirectory(prefix="reportsizedeltas-") + try: + # Download artifact + with open(file=artifact_folder_object.name + "/" + self.artifact_name + ".zip", mode="wb") as out_file: + with self.raw_http_request(url=artifact_download_url) as fp: + out_file.write(fp.read()) + + # Unzip artifact + artifact_zip_file = artifact_folder_object.name + "/" + self.artifact_name + ".zip" + with zipfile.ZipFile(file=artifact_zip_file, mode="r") as zip_ref: + zip_ref.extractall(path=artifact_folder_object.name) + os.remove(artifact_zip_file) + + return artifact_folder_object + + except Exception: + artifact_folder_object.cleanup() + raise + + def generate_report(self, artifact_folder_object, pr_head_sha, pr_number): + """Parse the artifact files and returns a dictionary: + markdown -- Markdown formatted report text + data -- list containing all the report data + + Keyword arguments: + artifact_folder_object -- object containing the data about the temporary folder that stores the markdown files + """ + with artifact_folder_object as artifact_folder: + report_markdown = (self.report_key_beginning + pr_head_sha + "]" + + "(https://github.com/" + self.repository_name + "/pull/" + str(pr_number) + + "/commits/" + pr_head_sha + ")**\n\n") + report_markdown = report_markdown + "FQBN | Flash Usage | RAM For Global Variables\n---|---|---" + reports_data = [] + for report_filename in sorted(os.listdir(path=artifact_folder)): + with open(file=artifact_folder + "/" + report_filename) as report_file: + report_data = json.load(report_file) + reports_data = reports_data + [report_data] + report_markdown = (report_markdown + "\n" + + report_data["fqbn"] + + generate_value_cell(report_data["flash_delta"]) + + generate_value_cell(report_data["ram_delta"])) + + logger.debug("Report:\n" + report_markdown) + return {"markdown": report_markdown, "data": reports_data} + + def comment_report(self, pr_number, report_markdown): + """Submit the report as a comment on the PR thread + + Keyword arguments: + pr_number -- pull request number to submit the report to + report_markdown -- Markdown formatted report + """ + report_data = {"body": report_markdown} + report_data = json.dumps(obj=report_data) + report_data = report_data.encode(encoding="utf-8") + url = ("https://api.github.com/repos/" + + self.repository_name + + "/issues/" + + str(pr_number) + + "/comments") + + self.http_request(url=url, data=report_data) + + def api_request(self, request, request_parameters="", page_number=1): + """Do a GitHub API request. 
Return a dictionary containing: + json_data -- JSON object containing the response + additional_pages -- indicates whether more pages of results remain (True, False) + page_count -- total number of pages of results + + Keyword arguments: + request -- the section of the URL following https://api.github.com/ + request_parameters -- GitHub API request parameters (see: https://developer.github.com/v3/#parameters) + (default value: "") + page_number -- Some responses will be paginated. This argument specifies which page should be returned. + (default value: 1) + """ + return self.get_json_response(url="https://api.github.com/" + request + "?" + request_parameters + "&page=" + + str(page_number) + "&per_page=100") + + def get_json_response(self, url): + """Load the specified URL and return a dictionary: + json_data -- JSON object containing the response + additional_pages -- indicates whether more pages of results remain (True, False) + page_count -- total number of pages of results + + Keyword arguments: + url -- the URL to load + """ + try: + response_data = self.http_request(url=url) + try: + json_data = json.loads(response_data["body"]) + except json.decoder.JSONDecodeError as exception: + # Output some information on the exception + logger.warning(str(exception.__class__.__name__) + ": " + str(exception)) + # pass on the exception to the caller + raise exception + + if not json_data: + # There was no HTTP error but an empty list was returned (e.g. pulls API request when the repo + # has no open PRs) + page_count = 0 + additional_pages = False + else: + page_count = 1 + additional_pages = False + + if response_data["headers"]["Link"] is not None: + # Get the pagination data + if response_data["headers"]["Link"].find(">; rel=\"next\"") != -1: + additional_pages = True + for link in response_data["headers"]["Link"].split(","): + if link[-13:] == ">; rel=\"last\"": + link = re.split("[?&>]", link) + for parameter in link: + if parameter[:5] == "page=": + page_count = int(parameter.split("=")[1]) + break + break + + return {"json_data": json_data, "additional_pages": additional_pages, "page_count": page_count} + except Exception as exception: + raise exception + + def http_request(self, url, data=None): + """Make a request and return a dictionary: + read -- the response + info -- headers + url -- the URL of the resource retrieved + + Keyword arguments: + url -- the URL to load + data -- data to pass with the request + (default value: None) + """ + with self.raw_http_request(url=url, data=data) as response_object: + return {"body": response_object.read().decode(encoding="utf-8", errors="ignore"), + "headers": response_object.info(), + "url": response_object.geturl()} + + def raw_http_request(self, url, data=None): + """Make a request and return an object containing the response. 
+
+    def get_json_response(self, url):
+        """Load the specified URL and return a dictionary:
+        json_data -- JSON object containing the response
+        additional_pages -- indicates whether more pages of results remain (True, False)
+        page_count -- total number of pages of results
+
+        Keyword arguments:
+        url -- the URL to load
+        """
+        response_data = self.http_request(url=url)
+        try:
+            json_data = json.loads(response_data["body"])
+        except json.decoder.JSONDecodeError as exception:
+            # Output some information on the exception
+            logger.warning(str(exception.__class__.__name__) + ": " + str(exception))
+            # Pass the exception on to the caller
+            raise
+
+        if not json_data:
+            # There was no HTTP error but an empty list was returned (e.g. pulls API request when the repo
+            # has no open PRs)
+            page_count = 0
+            additional_pages = False
+        else:
+            page_count = 1
+            additional_pages = False
+
+            if response_data["headers"]["Link"] is not None:
+                # Get the pagination data
+                if response_data["headers"]["Link"].find(">; rel=\"next\"") != -1:
+                    additional_pages = True
+                for link in response_data["headers"]["Link"].split(","):
+                    if link[-13:] == ">; rel=\"last\"":
+                        link = re.split("[?&>]", link)
+                        for parameter in link:
+                            if parameter[:5] == "page=":
+                                page_count = int(parameter.split("=")[1])
+                                break
+                        break
+
+        return {"json_data": json_data, "additional_pages": additional_pages, "page_count": page_count}
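+
+    # The GitHub API paginates large result sets via the Link response header.
+    # An illustrative (shortened) value:
+    #   <https://api.github.com/repositories/123?page=2>; rel="next",
+    #   <https://api.github.com/repositories/123?page=15>; rel="last"
+    # get_json_response() reports additional_pages=True when a rel="next" link is
+    # present and takes page_count from the page parameter of the rel="last" URL.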
+
+    def http_request(self, url, data=None):
+        """Make a request and return a dictionary:
+        body -- the response body
+        headers -- the response headers
+        url -- the URL of the resource retrieved
+
+        Keyword arguments:
+        url -- the URL to load
+        data -- data to pass with the request
+                (default value: None)
+        """
+        with self.raw_http_request(url=url, data=data) as response_object:
+            return {"body": response_object.read().decode(encoding="utf-8", errors="ignore"),
+                    "headers": response_object.info(),
+                    "url": response_object.geturl()}
+
+    def raw_http_request(self, url, data=None):
+        """Make a request and return an object containing the response.
+
+        Keyword arguments:
+        url -- the URL to load
+        data -- data to pass with the request
+                (default value: None)
+        """
+        # Maximum times to retry opening the URL before giving up
+        maximum_urlopen_retries = 3
+
+        logger.info("Opening URL: " + url)
+
+        # GitHub recommends using the user name as the User-Agent (https://developer.github.com/v3/#user-agent-required)
+        headers = {"Authorization": "token " + self.token, "User-Agent": self.repository_name.split("/")[0]}
+        request = urllib.request.Request(url=url, headers=headers, data=data)
+
+        retry_count = 0
+        while retry_count <= maximum_urlopen_retries:
+            retry_count += 1
+            try:
+                # The rate limit API is not subject to rate limiting
+                if not url.startswith("https://api.github.com/rate_limit"):
+                    self.handle_rate_limiting()
+                return urllib.request.urlopen(url=request)
+            except Exception as exception:
+                if not determine_urlopen_retry(exception=exception):
+                    raise
+
+        # Maximum retries reached without successfully opening URL
+        raise TimeoutError("Maximum number of URL load retries exceeded")
+
+    def handle_rate_limiting(self):
+        """Check whether the GitHub API request limit has been reached.
+        If so, exit with exit status 0.
+        """
+        rate_limiting_data = self.get_json_response(url="https://api.github.com/rate_limit")["json_data"]
+        # GitHub has two API types, each with their own request limits and counters.
+        # "search" applies only to api.github.com/search.
+        # "core" applies to all other parts of the API.
+        # Since this code only uses the "core" API, only those values are relevant
+        logger.debug("GitHub core API request allotment: " + str(rate_limiting_data["resources"]["core"]["limit"]))
+        logger.debug("Remaining API requests: " + str(rate_limiting_data["resources"]["core"]["remaining"]))
+        logger.debug("API request count reset time: " + str(rate_limiting_data["resources"]["core"]["reset"]))
+
+        if rate_limiting_data["resources"]["core"]["remaining"] == 0:
+            # GitHub uses a fixed rate limit window of 60 minutes. The window starts when the API request count goes
+            # from 0 to 1. 60 minutes after the start of the window, the request count is reset to 0.
+            logger.warning("GitHub API request quota has been reached. Try again later.")
+            sys.exit(0)
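+
+
+# For reference, the rate_limit endpoint returns JSON shaped like this (sample
+# values borrowed from the unit tests below):
+#   {"resources": {"core": {"limit": 42, "remaining": 0, "reset": 1234}}}
+# where "reset" is the Unix timestamp at which the request count resets.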
+
+
+def determine_urlopen_retry(exception):
+    """Determine whether the exception warrants another attempt at opening the URL.
+    If so, delay then return True. Otherwise, return False.
+
+    Keyword arguments:
+    exception -- the exception
+    """
+    # Retry urlopen after exceptions that start with the following strings
+    urlopen_retry_exceptions = [
+        # urllib.error.HTTPError: HTTP Error 403: Forbidden
+        "HTTPError: HTTP Error 403",
+        # urllib.error.HTTPError: HTTP Error 502: Bad Gateway
+        "HTTPError: HTTP Error 502",
+        # urllib.error.HTTPError: HTTP Error 503: Service Unavailable
+        # caused by rate limiting
+        "HTTPError: HTTP Error 503",
+        # http.client.RemoteDisconnected: Remote end closed connection without response
+        "RemoteDisconnected",
+        # ConnectionResetError: [Errno 104] Connection reset by peer
+        "ConnectionResetError",
+        # ConnectionRefusedError: [WinError 10061] No connection could be made because the target machine actively
+        # refused it
+        "ConnectionRefusedError",
+        # urllib.error.URLError (e.g. transient DNS failure)
+        "URLError"
+    ]
+
+    # Delay before retry (seconds)
+    urlopen_retry_delay = 30
+
+    exception_string = str(exception.__class__.__name__) + ": " + str(exception)
+    logger.info(exception_string)
+    for urlopen_retry_exception in urlopen_retry_exceptions:
+        if exception_string.startswith(urlopen_retry_exception):
+            # These errors may only be temporary, retry
+            logger.warning("Temporarily unable to open URL (" + str(exception) + "), retrying")
+            time.sleep(urlopen_retry_delay)
+            return True
+
+    # Other errors are probably permanent so give up
+    if exception_string.startswith("HTTPError: HTTP Error 401"):
+        # Give a nice hint as to the likely cause of this error
+        logger.error(exception)
+        logger.info("HTTP Error 401 may be caused by providing an incorrect GitHub personal access token.")
+    return False
+
+
+def generate_value_cell(value):
+    """Return the Markdown formatted text for a memory change data cell in the report table
+
+    Keyword arguments:
+    value -- amount of memory change
+    """
+    size_decrease_emoji = ":green_heart:"
+    size_increase_emoji = ":small_red_triangle:"
+
+    cell = " | "
+    if value == "N/A":
+        # No marker emoji; the "N/A" value is appended below
+        pass
+    elif value > 0:
+        cell = cell + size_increase_emoji + " +"
+    elif value < 0:
+        cell = cell + size_decrease_emoji + " "
+
+    return cell + str(value)
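+
+
+# Example results (mirrored by the unit tests below):
+#   generate_value_cell(42)    -> " | :small_red_triangle: +42"
+#   generate_value_cell(0)     -> " | 0"
+#   generate_value_cell(-42)   -> " | :green_heart: -42"
+#   generate_value_cell("N/A") -> " | N/A"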
+
+
+# Only execute the following code if the script is run directly, not imported
+if __name__ == "__main__":
+    main()
diff --git a/libraries/report-size-deltas/tests/data/size-deltas-reports/adafruit-samd-adafruit_feather_m0.json b/libraries/report-size-deltas/tests/data/size-deltas-reports/adafruit-samd-adafruit_feather_m0.json
new file mode 100644
index 00000000..c7c8d6c9
--- /dev/null
+++ b/libraries/report-size-deltas/tests/data/size-deltas-reports/adafruit-samd-adafruit_feather_m0.json
@@ -0,0 +1,10 @@
+{
+  "fqbn": "adafruit:samd:adafruit_feather_m0",
+  "sketch": "examples/ConnectionHandlerDemo",
+  "previous_flash": 10580,
+  "flash": 10580,
+  "flash_delta": 0,
+  "previous_ram": "N/A",
+  "ram": "N/A",
+  "ram_delta": "N/A"
+}
diff --git a/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrgsm1400.json b/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrgsm1400.json
new file mode 100644
index 00000000..3fdad887
--- /dev/null
+++ b/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrgsm1400.json
@@ -0,0 +1,10 @@
+{
+  "fqbn": "arduino:samd:mkrgsm1400",
+  "sketch": "examples/ConnectionHandlerDemo",
+  "previous_flash": "N/A",
+  "flash": 51636,
+  "flash_delta": "N/A",
+  "previous_ram": "N/A",
+  "ram": 5104,
+  "ram_delta": "N/A"
+}
diff --git a/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrnb1500.json b/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrnb1500.json
new file mode 100644
index 00000000..5bfb0df2
--- /dev/null
+++ b/libraries/report-size-deltas/tests/data/size-deltas-reports/arduino-samd-mkrnb1500.json
@@ -0,0 +1,10 @@
+{
+  "fqbn": "arduino:samd:mkrnb1500",
+  "sketch": "examples/ConnectionHandlerDemo",
+  "previous_flash": 50964,
+  "flash": 50940,
+  "flash_delta": -24,
+  "previous_ram": 5068,
+  "ram": 5068,
+  "ram_delta": 0
+}
diff --git a/libraries/report-size-deltas/tests/data/size-deltas-reports/esp8266-esp8266-huzzah.json b/libraries/report-size-deltas/tests/data/size-deltas-reports/esp8266-esp8266-huzzah.json
new file mode 100644
index 00000000..33675681
--- /dev/null
+++ b/libraries/report-size-deltas/tests/data/size-deltas-reports/esp8266-esp8266-huzzah.json
@@ -0,0 +1,10 @@
+{
+  "fqbn": "esp8266:esp8266:huzzah",
+  "sketch": "examples/ConnectionHandlerDemo",
+  "previous_flash": 274588,
+  "flash": 274620,
+  "flash_delta": 32,
+  "previous_ram": 27480,
+  "ram": 27496,
+  "ram_delta": 16
+}
diff --git a/libraries/report-size-deltas/tests/test_reportsizedeltas.py b/libraries/report-size-deltas/tests/test_reportsizedeltas.py
new file mode 100644
index 00000000..7c7c5f41
--- /dev/null
+++ b/libraries/report-size-deltas/tests/test_reportsizedeltas.py
@@ -0,0 +1,413 @@
+import distutils.dir_util
+import unittest.mock
+
+from reportsizedeltas import *
+
+
+# noinspection PyUnresolvedReferences
+class TestReportsizedeltas(unittest.TestCase):
+    # NOTE: the tests are run in order sorted by method name, not in the order below
+
+    set_verbosity(enable_verbosity=False)
+
+    # @unittest.skip("")
+    def test_set_verbosity(self):
+        with self.assertRaises(TypeError):
+            set_verbosity(enable_verbosity=2)
+        set_verbosity(enable_verbosity=True)
+        set_verbosity(enable_verbosity=False)
+
+    # @unittest.skip("")
+    def test_report_size_deltas(self):
+        repository_name = "test_name/test_repo"
+        artifact_download_url = "test_artifact_download_url"
+        artifact_folder_object = "test_artifact_folder_object"
+        report = {"markdown": "test_markdown", "data": "test_data"}
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name="foo", token="foo")
+
+        json_data = [{"number": 1, "locked": True, "head": {"sha": "foo123", "ref": "asdf"},
+                      "user": {"login": "1234"}},
+                     {"number": 2, "locked": True, "head": {"sha": "foo123", "ref": "asdf"},
+                      "user": {"login": "1234"}}]
+        report_size_deltas.api_request = unittest.mock.MagicMock(return_value={"json_data": json_data,
+                                                                               "additional_pages": True,
+                                                                               "page_count": 1})
+
+        # Test handling of locked PRs
+        self.assertEqual([], report_size_deltas.report_size_deltas())
+        calls = [unittest.mock.call(request="repos/" + repository_name + "/pulls",
+                                    page_number=1)]
+        report_size_deltas.api_request.assert_has_calls(calls)
+
+        # Test handling of existing reports
+        report_size_deltas.report_exists = unittest.mock.MagicMock(return_value=True)
+
+        for pr_data in json_data:
+            pr_data["locked"] = False
+
+        self.assertEqual([], report_size_deltas.report_size_deltas())
+
+        calls = []
+        for pr_data in json_data:
+            calls = calls + [unittest.mock.call(pr_number=pr_data["number"], pr_head_sha=pr_data["head"]["sha"])]
+
+        report_size_deltas.report_exists.assert_has_calls(calls)
+
+        # Test handling of no report artifact
+        report_size_deltas.report_exists = unittest.mock.MagicMock(return_value=False)
+
+        report_size_deltas.get_artifact_download_url_for_sha = unittest.mock.MagicMock(return_value=None)
+
+        self.assertEqual([], report_size_deltas.report_size_deltas())
+
+        calls = []
+        for pr_data in json_data:
+            calls = calls + [unittest.mock.call(pr_user_login=pr_data["user"]["login"],
+                                                pr_head_ref=pr_data["head"]["ref"],
+                                                pr_head_sha=pr_data["head"]["sha"])]
+
+        report_size_deltas.get_artifact_download_url_for_sha.assert_has_calls(calls)
+
+        # Test making reports
+        report_size_deltas.get_artifact_download_url_for_sha = unittest.mock.MagicMock(
+            return_value=artifact_download_url)
+
+        report_size_deltas.get_artifact = unittest.mock.MagicMock(return_value=artifact_folder_object)
+
+        report_size_deltas.generate_report = unittest.mock.MagicMock(return_value=report)
+
+        report_size_deltas.comment_report = unittest.mock.MagicMock()
+
+        report_list = []
+        for pr_data in json_data:
+            report_list = report_list + [{"pr_number": pr_data["number"], "report": report["data"]}]
+        self.assertEqual(report_list, report_size_deltas.report_size_deltas())
+
+    # @unittest.skip("")
+    def test_report_exists(self):
+        repository_name = "test_name/test_repo"
+        artifact_name = "test_artifact_name"
+        pr_number = 42
+        pr_head_sha = "foo123"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name=artifact_name,
+                                              token="foo")
+
+        json_data = [{"body": "foo123"}, {"body": report_size_deltas.report_key_beginning + pr_head_sha + "foo"}]
+        report_size_deltas.api_request = unittest.mock.MagicMock(return_value={"json_data": json_data,
+                                                                               "additional_pages": False,
+                                                                               "page_count": 1})
+
+        self.assertTrue(report_size_deltas.report_exists(pr_number=pr_number, pr_head_sha=pr_head_sha))
+
+        report_size_deltas.api_request.assert_called_once_with(request="repos/" + repository_name + "/issues/"
+                                                                       + str(pr_number) + "/comments",
+                                                               page_number=1)
+
+        self.assertFalse(report_size_deltas.report_exists(pr_number=pr_number, pr_head_sha="asdf"))
+
+    # @unittest.skip("")
+    def test_get_artifact_download_url_for_sha(self):
+        repository_name = "test_name/test_repo"
+        pr_user_login = "test_pr_user_login"
+        pr_head_ref = "test_pr_head_ref"
+        pr_head_sha = "bar123"
+        test_artifact_url = "test_artifact_url"
+        run_id = "4567"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name="foo", token="foo")
+
+        json_data = {"workflow_runs": [{"head_sha": "foo123", "id": "1234"},
+                                       {"head_sha": pr_head_sha, "id": run_id}]}
+        report_size_deltas.api_request = unittest.mock.MagicMock(return_value={"json_data": json_data,
+                                                                               "additional_pages": True,
+                                                                               "page_count": 3})
+        report_size_deltas.get_artifact_download_url_for_run = unittest.mock.MagicMock(return_value=None)
+
+        # No SHA match
+        self.assertEqual(None, report_size_deltas.get_artifact_download_url_for_sha(pr_user_login=pr_user_login,
+                                                                                    pr_head_ref=pr_head_ref,
+                                                                                    pr_head_sha="foosha"))
+
+        # Test pagination
+        request = "repos/" + repository_name + "/actions/runs"
+        request_parameters = ("actor=" + pr_user_login + "&branch=" + pr_head_ref
+                              + "&event=pull_request&status=completed")
+        calls = [unittest.mock.call(request=request, request_parameters=request_parameters, page_number=1),
+                 unittest.mock.call(request=request, request_parameters=request_parameters, page_number=2),
+                 unittest.mock.call(request=request, request_parameters=request_parameters, page_number=3)]
+        report_size_deltas.api_request.assert_has_calls(calls)
+
+        # SHA match, but no artifact for the run
+        self.assertEqual(None,
+                         report_size_deltas.get_artifact_download_url_for_sha(pr_user_login=pr_user_login,
+                                                                              pr_head_ref=pr_head_ref,
+                                                                              pr_head_sha=pr_head_sha))
+
+        report_size_deltas.get_artifact_download_url_for_run = unittest.mock.MagicMock(
+            return_value=test_artifact_url)
+
+        # SHA match, artifact match
+        self.assertEqual(test_artifact_url,
+                         report_size_deltas.get_artifact_download_url_for_sha(pr_user_login=pr_user_login,
+                                                                              pr_head_ref=pr_head_ref,
+                                                                              pr_head_sha=pr_head_sha))
+
+        report_size_deltas.get_artifact_download_url_for_run.assert_called_once_with(run_id=run_id)
+
+    # @unittest.skip("")
+    def test_get_artifact_download_url_for_run(self):
+        repository_name = "test_name/test_repo"
+        artifact_name = "test_artifact_name"
+        archive_download_url = "archive_download_url"
+        run_id = "1234"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name=artifact_name,
+                                              token="foo")
+
+        json_data = {"artifacts": [{"name": artifact_name, "archive_download_url": archive_download_url},
+                                   {"name": "bar123", "archive_download_url": "wrong_artifact_url"}]}
+        report_size_deltas.api_request = unittest.mock.MagicMock(return_value={"json_data": json_data,
+                                                                               "additional_pages": False,
+                                                                               "page_count": 1})
+
+        # Artifact name match
+        self.assertEqual(archive_download_url, report_size_deltas.get_artifact_download_url_for_run(run_id=run_id))
+
+        report_size_deltas.api_request.assert_called_once_with(
+            request="repos/" + repository_name + "/actions/runs/" + str(run_id) + "/artifacts",
+            page_number=1)
+
+        json_data = {"artifacts": [{"name": "foo123", "archive_download_url": "test_artifact_url"},
+                                   {"name": "bar123", "archive_download_url": "wrong_artifact_url"}]}
+        report_size_deltas.api_request = unittest.mock.MagicMock(return_value={"json_data": json_data,
+                                                                               "additional_pages": False,
+                                                                               "page_count": 1})
+
+        # No artifact name match
+        self.assertEqual(None, report_size_deltas.get_artifact_download_url_for_run(run_id=run_id))
+
+    # # TODO
+    # def test_get_artifact(self):
+
+    # @unittest.skip("")
+    def test_generate_report(self):
+        pr_head_sha = "asdf123"
+        pr_number = 42
+        repository_name = "test_user/test_repo"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name="foo", token="foo")
+
+        artifact_folder_object = tempfile.TemporaryDirectory(prefix="test_reportsizedeltas-")
+        try:
+            distutils.dir_util.copy_tree(src="data/size-deltas-reports", dst=artifact_folder_object.name)
+        except Exception:
+            artifact_folder_object.cleanup()
+            raise
+
+        report = report_size_deltas.generate_report(artifact_folder_object=artifact_folder_object,
+                                                    pr_head_sha=pr_head_sha, pr_number=pr_number)
+        report_markdown = (
+            report_size_deltas.report_key_beginning + pr_head_sha
+            + "](https://github.com/" + repository_name + "/pull/" + str(pr_number) + "/commits/" + pr_head_sha
+            + ")**\n\n"
+            "FQBN | Flash Usage | RAM For Global Variables\n"
+            "---|---|---\n"
+            "adafruit:samd:adafruit_feather_m0 | 0 | N/A\n"
+            "arduino:samd:mkrgsm1400 | N/A | N/A\n"
+            "arduino:samd:mkrnb1500 | :green_heart: -24 | 0\n"
+            "esp8266:esp8266:huzzah | :small_red_triangle: +32 | :small_red_triangle: +16")
+        self.assertEqual(report_markdown, report["markdown"])
+
+        report_data = [{'flash': 10580,
+                        'flash_delta': 0,
+                        'fqbn': 'adafruit:samd:adafruit_feather_m0',
+                        'previous_flash': 10580,
+                        'previous_ram': 'N/A',
+                        'ram': 'N/A',
+                        'ram_delta': 'N/A',
+                        'sketch': 'examples/ConnectionHandlerDemo'},
+                       {'flash': 51636,
+                        'flash_delta': 'N/A',
+                        'fqbn': 'arduino:samd:mkrgsm1400',
+                        'previous_flash': 'N/A',
+                        'previous_ram': 'N/A',
+                        'ram': 5104,
+                        'ram_delta': 'N/A',
+                        'sketch': 'examples/ConnectionHandlerDemo'},
+                       {'flash': 50940,
+                        'flash_delta': -24,
+                        'fqbn': 'arduino:samd:mkrnb1500',
+                        'previous_flash': 50964,
+                        'previous_ram': 5068,
+                        'ram': 5068,
+                        'ram_delta': 0,
+                        'sketch': 'examples/ConnectionHandlerDemo'},
+                       {'flash': 274620,
+                        'flash_delta': 32,
+                        'fqbn': 'esp8266:esp8266:huzzah',
+                        'previous_flash': 274588,
+                        'previous_ram': 27480,
+                        'ram': 27496,
+                        'ram_delta': 16,
+                        'sketch': 'examples/ConnectionHandlerDemo'}]
+        self.assertEqual(report_data, report["data"])
+
+    # @unittest.skip("")
+    def test_comment_report(self):
+        pr_number = 42
+        report_markdown = "test_report_markdown"
+        repository_name = "test_user/test_repo"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=repository_name, artifact_name="foo", token="foo")
+
+        report_size_deltas.http_request = unittest.mock.MagicMock()
+
+        report_size_deltas.comment_report(pr_number=pr_number, report_markdown=report_markdown)
+
+        report_data = {"body": report_markdown}
+        report_data = json.dumps(obj=report_data)
+        report_data = report_data.encode(encoding="utf-8")
+
+        report_size_deltas.http_request.assert_called_once_with(
+            url="https://api.github.com/repos/" + repository_name + "/issues/" + str(pr_number) + "/comments",
+            data=report_data)
+
+    # @unittest.skip("")
+    def test_api_request(self):
+        response_data = {"json_data": {"foo": "bar"},
+                         "additional_pages": False,
+                         "page_count": 1}
+        request = "test_request"
+        request_parameters = "test_parameters"
+        page_number = 1
+
+        report_size_deltas = ReportSizeDeltas(repository_name="foo", artifact_name="foo", token="foo")
+
+        report_size_deltas.get_json_response = unittest.mock.MagicMock(return_value=response_data)
+
+        self.assertEqual(response_data, report_size_deltas.api_request(request=request,
+                                                                       request_parameters=request_parameters,
+                                                                       page_number=page_number))
+        report_size_deltas.get_json_response.assert_called_once_with(
+            url="https://api.github.com/" + request + "?" + request_parameters
+                + "&page=" + str(page_number) + "&per_page=100")
+
+    # @unittest.skip("")
+    def test_get_json_response(self):
+        response = {"headers": {"Link": None}, "body": "[]"}
+        url = "test_url"
+
+        report_size_deltas = ReportSizeDeltas(repository_name="foo", artifact_name="foo", token="foo")
+
+        report_size_deltas.http_request = unittest.mock.MagicMock(return_value=response)
+
+        # Empty body
+        response_data = report_size_deltas.get_json_response(url=url)
+        self.assertEqual(json.loads(response["body"]), response_data["json_data"])
+        self.assertFalse(response_data["additional_pages"])
+        self.assertEqual(0, response_data["page_count"])
+        report_size_deltas.http_request.assert_called_once_with(url=url)
+
+        response = {"headers": {"Link": None}, "body": "[42]"}
+        report_size_deltas.http_request = unittest.mock.MagicMock(return_value=response)
+
+        # Non-empty body, Link field is None
+        response_data = report_size_deltas.get_json_response(url=url)
+        self.assertEqual(json.loads(response["body"]), response_data["json_data"])
+        self.assertFalse(response_data["additional_pages"])
+        self.assertEqual(1, response_data["page_count"])
+
+        response = {"headers": {"Link": '<test_url?page=2>; rel="next", '
+                                        '<test_url?page=4>; rel="last"'},
+                    "body": "[42]"}
+        report_size_deltas.http_request = unittest.mock.MagicMock(return_value=response)
+
+        # Non-empty body, Link field is populated
+        response_data = report_size_deltas.get_json_response(url=url)
+        self.assertEqual(json.loads(response["body"]), response_data["json_data"])
+        self.assertTrue(response_data["additional_pages"])
+        self.assertEqual(4, response_data["page_count"])
+
+    # @unittest.skip("")
+    def test_http_request(self):
+        url = "test_url"
+        data = "test_data"
+
+        report_size_deltas = ReportSizeDeltas(repository_name="foo", artifact_name="foo", token="foo")
+
+        report_size_deltas.raw_http_request = unittest.mock.MagicMock()
+
+        report_size_deltas.http_request(url=url, data=data)
+
+        report_size_deltas.raw_http_request.assert_called_once_with(url=url, data=data)
+
+    # @unittest.skip("")
+    def test_raw_http_request(self):
+        user_name = "test_user"
+        repo_name = "test_repo"
+        token = "test_token"
+        url = "test_url"
+        data = "test_data"
+        request = "test_request"
+
+        report_size_deltas = ReportSizeDeltas(repository_name=user_name + "/" + repo_name, artifact_name="foo",
+                                              token=token)
+
+        urllib.request.Request = unittest.mock.MagicMock(return_value=request)
+        report_size_deltas.handle_rate_limiting = unittest.mock.MagicMock()
+        urllib.request.urlopen = unittest.mock.MagicMock()
+
+        report_size_deltas.raw_http_request(url=url, data=data)
+
+        urllib.request.Request.assert_called_once_with(url=url,
+                                                       headers={"Authorization": "token " + token,
+                                                                "User-Agent": user_name},
+                                                       data=data)
+
+        # URL != https://api.github.com/rate_limit
+        report_size_deltas.handle_rate_limiting.assert_called_once()
+
+        report_size_deltas.handle_rate_limiting.reset_mock()
+        urllib.request.urlopen.reset_mock()
+
+        url = "https://api.github.com/rate_limit"
+        report_size_deltas.raw_http_request(url=url, data=data)
+
+        # URL == https://api.github.com/rate_limit
+        report_size_deltas.handle_rate_limiting.assert_not_called()
+
+        urllib.request.urlopen.assert_called_once_with(url=request)
+
+    # @unittest.skip("")
+    def test_handle_rate_limiting(self):
+        report_size_deltas = ReportSizeDeltas(repository_name="foo", artifact_name="foo", token="foo")
+
+        json_data = {"json_data": {"resources": {"core": {"remaining": 0, "reset": 1234, "limit": 42}}}}
+        report_size_deltas.get_json_response = unittest.mock.MagicMock(return_value=json_data)
+
+        # noinspection PyTypeChecker
+        with self.assertRaises(SystemExit) as cm:
+            report_size_deltas.handle_rate_limiting()
+        self.assertEqual(cm.exception.code, 0)
+
+        report_size_deltas.get_json_response.assert_called_once_with(url="https://api.github.com/rate_limit")
+
+        json_data["json_data"]["resources"]["core"]["remaining"] = 42
+        report_size_deltas.handle_rate_limiting()
+
+    @unittest.skip("disabled because it causes a delay")
+    def test_determine_urlopen_retry_true(self):
+        self.assertTrue(determine_urlopen_retry(
+            exception=urllib.error.HTTPError(None, 502, "Bad Gateway", None, None)))
+
+    # @unittest.skip("")
+    def test_determine_urlopen_retry_false(self):
+        self.assertFalse(determine_urlopen_retry(
+            exception=urllib.error.HTTPError(None, 404, "Not Found", None, None)))
+
+    # @unittest.skip("")
+    def test_generate_value_cell(self):
+        self.assertEqual(" | :small_red_triangle: +42", generate_value_cell(42))
+        self.assertEqual(" | 0", generate_value_cell(0))
+        self.assertEqual(" | :green_heart: -42", generate_value_cell(-42))
+        self.assertEqual(" | N/A", generate_value_cell("N/A"))
+
+
+if __name__ == '__main__':
+    unittest.main()
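+
+# Note: a sketch of how this suite is expected to be invoked (an assumption
+# based on the relative "data/size-deltas-reports" path used above): run from
+# the tests directory, with the reportsizedeltas module importable, e.g.:
+#   python3 -m unittest test_reportsizedeltas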