Просмотр исходного кода

Added new scripts to CI results

- Added to GitHub statuses (61 results)

- Reworked generated release table to include these (16 results, only thumb)

These also required a surprisingly large number of other changes:

- Bumped CI Ubuntu version 18.04 -> 20.04, 22.04 is already on the
  horizon but not usable in GitHub yet

- Manually upgraded to GCC v10, this is required for the -fcallgraph-info
  flag that scripts/stack.py uses.

- Increased paginated status queries to 100 per-page. If we have more
  statuses than this the status diffs may get much more complicated...

- Forced whitespace in generated release table to always be nbsp. GitHub
  tables get scrunched rather ugly without this, preferring margins to
  readable tables.

- Added limited support for "∞" results, since this is returned by
  ./scripts/stack.py for recursive functions.

As a side-note, this increases the number of statuses reported
per-commit from 6 to 61, so hopefully that doesn't cause any problems...
Christopher Haster 3 лет назад
Родитель
Commit
9d54603ce2

+ 1 - 1
.github/workflows/post-release.yml

@@ -6,7 +6,7 @@ on:
 
 jobs:
   post-release:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
       # trigger post-release in dependency repo, this indirection allows the
       # dependency repo to be updated often without affecting this repo. At

+ 61 - 80
.github/workflows/release.yml

@@ -7,7 +7,7 @@ on:
 
 jobs:
   release:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
 
     # need to manually check for a couple things
     # - tests passed?
@@ -73,89 +73,70 @@ jobs:
           # previous results to compare against?
           [ -n "$LFS_PREV_VERSION" ] && curl -sS \
             "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/`
-              `status/$LFS_PREV_VERSION" \
+              `status/$LFS_PREV_VERSION?per_page=100" \
             | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]' \
             >> prev-results.json \
             || true
 
-          # unfortunately these each have their own format
-          [ -e results/code-thumb.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / code").description
-                  | capture("Code size is (?<result>[0-9]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/code.py -u results/code-thumb.csv --summary | awk '
-              NR==2 {printf "Code size,%d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-          [ -e results/code-thumb-readonly.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / code (readonly)").description
-                  | capture("Code size is (?<result>[0-9]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/code.py -u results/code-thumb-readonly.csv --summary | awk '
-              NR==2 {printf "Code size<br/>(readonly),%d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-          [ -e results/code-thumb-threadsafe.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / code (threadsafe)").description
-                  | capture("Code size is (?<result>[0-9]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/code.py -u results/code-thumb-threadsafe.csv --summary | awk '
-              NR==2 {printf "Code size<br/>(threadsafe),%d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-          [ -e results/code-thumb-migrate.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / code (migrate)").description
-                  | capture("Code size is (?<result>[0-9]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/code.py -u results/code-thumb-migrate.csv --summary | awk '
-              NR==2 {printf "Code size<br/>(migrate),%d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-          [ -e results/code-thumb-error-asserts.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / code (error-asserts)").description
-                  | capture("Code size is (?<result>[0-9]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/code.py -u results/code-thumb-error-asserts.csv --summary | awk '
-              NR==2 {printf "Code size<br/>(error-asserts),%d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-          [ -e results/coverage.csv ] && ( \
-            export PREV="$(jq -re '
-                  select(.context == "results / coverage").description
-                  | capture("Coverage is (?<result>[0-9\\.]+)").result' \
-                prev-results.json || echo 0)"
-            ./scripts/coverage.py -u results/coverage.csv --summary | awk -F '[ /%]+' '
-              NR==2 {printf "Coverage,%.1f%% of %d lines",$4,$3}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
-              NR==2 {printf "\n"}' \
-            >> results.csv)
-
-          # transpose to GitHub table
-          [ -e results.csv ] || exit 0
-          awk -F ',' '
-            {label[NR]=$1; value[NR]=$2}
-            END {
-              for (r=1; r<=NR; r++) {printf "| %s ",label[r]}; printf "|\n";
-              for (r=1; r<=NR; r++) {printf "|:--"}; printf "|\n";
-              for (r=1; r<=NR; r++) {printf "| %s ",value[r]}; printf "|\n"}' \
-            results.csv > results.txt
-          echo "RESULTS:"
+          # build table for GitHub
+          echo "<table>" >> results.txt
+          echo "<thead>" >> results.txt
+          echo "<tr>" >> results.txt
+          echo "<th align=left>Configuration</th>" >> results.txt
+          for r in Code Stack Structs Coverage
+          do
+            echo "<th align=right>$r</th>" >> results.txt
+          done
+          echo "</tr>" >> results.txt
+          echo "</thead>" >> results.txt
+
+          echo "<tbody>" >> results.txt
+          for c in "" readonly threadsafe migrate error-asserts
+          do
+            echo "<tr>" >> results.txt
+            c_or_default=${c:-default}
+            echo "<td align=left>${c_or_default^}</td>" >> results.txt
+            for r in code stack structs
+            do
+              # per-config results
+              echo "<td align=right>" >> results.txt
+              [ -e results/thumb${c:+-$c}.csv ] && ( \
+                export PREV="$(jq -re '
+                      select(.context == "'"results (thumb${c:+, $c}) / $r"'").description
+                      | capture("(?<result>[0-9∞]+)").result' \
+                    prev-results.json || echo 0)"
+                ./scripts/summary.py results/thumb${c:+-$c}.csv -f $r -Y | awk '
+                  NR==2 {printf "%s B",$2}
+                  NR==2 && ENVIRON["PREV"]+0 != 0 {
+                    printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}
+                  NR==2 {printf "\n"}' \
+                | sed -e 's/ /\&nbsp;/g' \
+                >> results.txt)
+              echo "</td>" >> results.txt
+            done
+            # coverage results
+            if [ -z $c ]
+            then
+              echo "<td rowspan=0 align=right>" >> results.txt
+              [ -e results/coverage.csv ] && ( \
+                export PREV="$(jq -re '
+                      select(.context == "results / coverage").description
+                      | capture("(?<result>[0-9\\.]+)").result' \
+                    prev-results.json || echo 0)"
+                ./scripts/coverage.py -u results/coverage.csv -Y | awk -F '[ /%]+' '
+                  NR==2 {printf "%.1f%% of %d lines",$4,$3}
+                  NR==2 && ENVIRON["PREV"]+0 != 0 {
+                    printf " (%+.1f%%)",$4-ENVIRON["PREV"]}
+                  NR==2 {printf "\n"}' \
+                | sed -e 's/ /\&nbsp;/g' \
+                >> results.txt)
+              echo "</td>" >> results.txt
+            fi
+            echo "</tr>" >> results.txt
+          done
+          echo "</tbody>" >> results.txt
+          echo "</table>" >> results.txt
+
           cat results.txt
 
       # find changes from history

+ 1 - 1
.github/workflows/status.yml

@@ -6,7 +6,7 @@ on:
 
 jobs:
   status:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
       # custom statuses?
       - uses: dawidd6/action-download-artifact@v2

+ 81 - 67
.github/workflows/test.yml

@@ -8,7 +8,7 @@ env:
 jobs:
   # run tests
   test:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     strategy:
       fail-fast: false
       matrix:
@@ -18,11 +18,11 @@ jobs:
       - uses: actions/checkout@v2
       - name: install
         run: |
-          # need toml, also pip3 isn't installed by default?
+          # need a few additional tools
           sudo apt-get update -qq
           sudo apt-get install -qq python3 python3-pip lcov
           sudo pip3 install toml
-          gcc --version
+          python3 --version
 
           # setup a ram-backed disk to speed up reentrant tests
           mkdir disks
@@ -36,41 +36,49 @@ jobs:
 
           echo "TESTFLAGS=$TESTFLAGS" >> $GITHUB_ENV
 
+      # we're not cross-compiling with x86, but we do need the newest
+      # version of gcc for the -fcallgraph-info=su flag
+      - name: install-x86_64
+        if: ${{matrix.arch == 'x86_64'}}
+        run: |
+          sudo apt-get install -qq gcc-10
+          echo "CC=gcc-10" >> $GITHUB_ENV
+          gcc-10 --version
       # cross-compile with ARM Thumb (32-bit, little-endian)
       - name: install-thumb
         if: ${{matrix.arch == 'thumb'}}
         run: |
           sudo apt-get install -qq \
-            gcc-arm-linux-gnueabi \
+            gcc-10-arm-linux-gnueabi \
             libc6-dev-armel-cross \
             qemu-user
-          echo "CC=arm-linux-gnueabi-gcc -mthumb --static" >> $GITHUB_ENV
+          echo "CC=arm-linux-gnueabi-gcc-10 -mthumb --static" >> $GITHUB_ENV
           echo "EXEC=qemu-arm" >> $GITHUB_ENV
-          arm-linux-gnueabi-gcc --version
+          arm-linux-gnueabi-gcc-10 --version
           qemu-arm -version
       # cross-compile with MIPS (32-bit, big-endian)
       - name: install-mips
         if: ${{matrix.arch == 'mips'}}
         run: |
           sudo apt-get install -qq \
-            gcc-mips-linux-gnu \
+            gcc-10-mips-linux-gnu \
             libc6-dev-mips-cross \
             qemu-user
-          echo "CC=mips-linux-gnu-gcc --static" >> $GITHUB_ENV
+          echo "CC=mips-linux-gnu-gcc-10 --static" >> $GITHUB_ENV
           echo "EXEC=qemu-mips" >> $GITHUB_ENV
-          mips-linux-gnu-gcc --version
+          mips-linux-gnu-gcc-10 --version
           qemu-mips -version
       # cross-compile with PowerPC (32-bit, big-endian)
       - name: install-powerpc
         if: ${{matrix.arch == 'powerpc'}}
         run: |
           sudo apt-get install -qq \
-            gcc-powerpc-linux-gnu \
+            gcc-10-powerpc-linux-gnu \
             libc6-dev-powerpc-cross \
             qemu-user
-          echo "CC=powerpc-linux-gnu-gcc --static" >> $GITHUB_ENV
+          echo "CC=powerpc-linux-gnu-gcc-10 --static" >> $GITHUB_ENV
           echo "EXEC=qemu-ppc" >> $GITHUB_ENV
-          powerpc-linux-gnu-gcc --version
+          powerpc-linux-gnu-gcc-10 --version
           qemu-ppc -version
 
       # make sure example can at least compile
@@ -148,102 +156,108 @@ jobs:
           retention-days: 1
 
       # update results
-      - name: results-code
+      - name: results
         run: |
           mkdir -p results
           make clean
-          make code \
+          make lfs.csv \
             CFLAGS+=" \
               -DLFS_NO_ASSERT \
               -DLFS_NO_DEBUG \
               -DLFS_NO_WARN \
-              -DLFS_NO_ERROR" \
-            CODEFLAGS+="-o results/code-${{matrix.arch}}.csv"
-      - name: results-code-readonly
+              -DLFS_NO_ERROR"
+          cp lfs.csv results/${{matrix.arch}}.csv
+          ./scripts/summary.py results/${{matrix.arch}}.csv
+      - name: results-readonly
         run: |
           mkdir -p results
           make clean
-          make code \
+          make lfs.csv \
             CFLAGS+=" \
               -DLFS_NO_ASSERT \
               -DLFS_NO_DEBUG \
               -DLFS_NO_WARN \
               -DLFS_NO_ERROR \
-              -DLFS_READONLY" \
-            CODEFLAGS+="-o results/code-${{matrix.arch}}-readonly.csv"
-      - name: results-code-threadsafe
+              -DLFS_READONLY"
+          cp lfs.csv results/${{matrix.arch}}-readonly.csv
+          ./scripts/summary.py results/${{matrix.arch}}-readonly.csv
+      - name: results-threadsafe
         run: |
           mkdir -p results
           make clean
-          make code \
+          make lfs.csv \
             CFLAGS+=" \
               -DLFS_NO_ASSERT \
               -DLFS_NO_DEBUG \
               -DLFS_NO_WARN \
               -DLFS_NO_ERROR \
-              -DLFS_THREADSAFE" \
-            CODEFLAGS+="-o results/code-${{matrix.arch}}-threadsafe.csv"
-      - name: results-code-migrate
+              -DLFS_THREADSAFE"
+          cp lfs.csv results/${{matrix.arch}}-threadsafe.csv
+          ./scripts/summary.py results/${{matrix.arch}}-threadsafe.csv
+      - name: results-migrate
         run: |
           mkdir -p results
           make clean
-          make code \
+          make lfs.csv \
             CFLAGS+=" \
               -DLFS_NO_ASSERT \
               -DLFS_NO_DEBUG \
               -DLFS_NO_WARN \
               -DLFS_NO_ERROR \
-              -DLFS_MIGRATE" \
-            CODEFLAGS+="-o results/code-${{matrix.arch}}-migrate.csv"
-      - name: results-code-error-asserts
+              -DLFS_MIGRATE"
+          cp lfs.csv results/${{matrix.arch}}-migrate.csv
+          ./scripts/summary.py results/${{matrix.arch}}-migrate.csv
+      - name: results-error-asserts
         run: |
           mkdir -p results
           make clean
-          make code \
+          make lfs.csv \
             CFLAGS+=" \
               -DLFS_NO_DEBUG \
               -DLFS_NO_WARN \
               -DLFS_NO_ERROR \
-              -D'LFS_ASSERT(test)=do {if(!(test)) {return -1;}} while(0)'" \
-            CODEFLAGS+="-o results/code-${{matrix.arch}}-error-asserts.csv"
+              -D'LFS_ASSERT(test)=do {if(!(test)) {return -1;}} while(0)'"
+          cp lfs.csv results/${{matrix.arch}}-error-asserts.csv
+          ./scripts/summary.py results/${{matrix.arch}}-error-asserts.csv
       - name: upload-results
         uses: actions/upload-artifact@v2
         with:
           name: results
           path: results
-      # limit reporting to Thumb, otherwise there would be too many numbers
-      # flying around for the results to be easily readable
+
+      # create statuses with results
       - name: collect-status
-        if: ${{matrix.arch == 'thumb'}}
         run: |
           mkdir -p status
-          for f in $(shopt -s nullglob ; echo results/code*.csv)
+          for f in $(shopt -s nullglob ; echo results/*.csv)
           do
-            export STEP="results-code$(
-              echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p')"
-            export CONTEXT="results / code$(
-              echo $f | sed -n 's/.*code-.*-\(.*\).csv/ (\1)/p')"
-            export PREV="$(curl -sS \
-              "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
-              | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
-                | select(.context == env.CONTEXT).description
-                | capture("Code size is (?<result>[0-9]+)").result' \
-              || echo 0)"
-            export DESCRIPTION="$(./scripts/code.py -u $f --summary | awk '
-              NR==2 {printf "Code size is %d B",$2}
-              NR==2 && ENVIRON["PREV"]+0 != 0 {
-                printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')"
-            jq -n '{
-              state: "success",
-              context: env.CONTEXT,
-              description: env.DESCRIPTION,
-              target_job: "${{github.job}} (${{matrix.arch}})",
-              target_step: env.STEP}' \
-              | tee status/code$(
-                echo $f | sed -n 's/.*code-.*-\(.*\).csv/-\1/p').json
+            export STEP="results$(
+              echo $f | sed -n 's/[^-]*-\(.*\).csv/-\1/p')"
+            for r in code stack structs
+            do
+              export CONTEXT="results (${{matrix.arch}}$(
+                echo $f | sed -n 's/[^-]*-\(.*\).csv/, \1/p')) / $r"
+              export PREV="$(curl -sS \
+                "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master?per_page=100" \
+                | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
+                  | select(.context == env.CONTEXT).description
+                  | capture("(?<result>[0-9∞]+)").result' \
+                || echo 0)"
+              export DESCRIPTION="$(./scripts/summary.py $f -f $r -Y | awk '
+                NR==2 {printf "%s B",$2}
+                NR==2 && ENVIRON["PREV"]+0 != 0 {
+                  printf " (%+.1f%%)",100*($2-ENVIRON["PREV"])/ENVIRON["PREV"]}')"
+              jq -n '{
+                state: "success",
+                context: env.CONTEXT,
+                description: env.DESCRIPTION,
+                target_job: "${{github.job}} (${{matrix.arch}})",
+                target_step: env.STEP}' \
+                | tee status/$r-${{matrix.arch}}$(
+                  echo $f | sed -n 's/[^-]*-\(.*\).csv/-\1/p').json
+            done
           done
       - name: upload-status
-        if: ${{matrix.arch == 'thumb'}}
         uses: actions/upload-artifact@v2
         with:
           name: status
@@ -252,7 +266,7 @@ jobs:
 
   # run under Valgrind to check for memory errors
   valgrind:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     steps:
       - uses: actions/checkout@v2
       - name: install
@@ -272,7 +286,7 @@ jobs:
 
   # self-host with littlefs-fuse for a fuzz-like test
   fuse:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
@@ -318,7 +332,7 @@ jobs:
 
   # test migration using littlefs-fuse
   migrate:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     if: ${{!endsWith(github.ref, '-prefix')}}
     steps:
       - uses: actions/checkout@v2
@@ -385,7 +399,7 @@ jobs:
 
   # collect coverage info
   coverage:
-    runs-on: ubuntu-18.04
+    runs-on: ubuntu-20.04
     needs: [test]
     steps:
       - uses: actions/checkout@v2
@@ -421,14 +435,14 @@ jobs:
           export STEP="results-coverage"
           export CONTEXT="results / coverage"
           export PREV="$(curl -sS \
-            "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master" \
+            "$GITHUB_API_URL/repos/$GITHUB_REPOSITORY/status/master?per_page=100" \
             | jq -re 'select(.sha != env.GITHUB_SHA) | .statuses[]
               | select(.context == env.CONTEXT).description
-              | capture("Coverage is (?<result>[0-9\\.]+)").result' \
+              | capture("(?<result>[0-9\\.]+)").result' \
             || echo 0)"
           export DESCRIPTION="$(
-            ./scripts/coverage.py -u results/coverage.csv --summary | awk -F '[ /%]+' '
-              NR==2 {printf "Coverage is %.1f%% of %d lines",$4,$3}
+            ./scripts/coverage.py -u results/coverage.csv -Y | awk -F '[ /%]+' '
+              NR==2 {printf "%.1f%% of %d lines",$4,$3}
               NR==2 && ENVIRON["PREV"]+0 != 0 {
                 printf " (%+.1f%%)",$4-ENVIRON["PREV"]}')"
           jq -n '{