Merge pull request #2088 from tannewt/master
Swap the CI to GitHub Actions from Travis
commit 193df55bec
.github/workflows/build.yml (vendored, new file, 192 lines)

@@ -0,0 +1,192 @@
name: Build CI

on: [push, pull_request, release]

jobs:
  test:
    runs-on: ubuntu-16.04
    steps:
    - name: Dump GitHub context
      env:
        GITHUB_CONTEXT: ${{ toJson(github) }}
      run: echo "$GITHUB_CONTEXT"
    - name: Fail if not a release publish # workaround has `on` doesn't have this filter
      run: exit 1
      if: github.event_name == 'release' && (github.event.action != 'published' && github.event.action != 'rerequested')
    - name: Set up Python 3.5
      uses: actions/setup-python@v1
      with:
        python-version: 3.5
    - name: Install deps
      run: |
        sudo apt-get install -y gettext librsvg2-bin
        pip install requests sh click setuptools cpp-coveralls Sphinx sphinx-rtd-theme recommonmark sphinxcontrib-svg2pdfconverter polib pyyaml
    - name: Versions
      run: |
        gcc --version
        python3 --version
    - uses: actions/checkout@v1
      with:
        submodules: true
    - name: CircuitPython version
      run: git describe --dirty --always --tags
    - name: Build mpy-cross
      run: make -C mpy-cross -j2
    - name: Build unix port
      run: |
        make -C ports/unix deplibs -j2
        make -C ports/unix -j2
        make -C ports/unix coverage -j2
    - name: Test all
      run: MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1
      working-directory: tests
    - name: Print failure info
      run: |
        for exp in *.exp;
        do testbase=$(basename $exp .exp);
        echo -e "\nFAILURE $testbase";
        diff -u $testbase.exp $testbase.out;
        done
      working-directory: tests
      if: failure()
    - name: Test threads
      run: MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 -d thread
      working-directory: tests
    - name: Native Tests
      run: MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 --emit native
      working-directory: tests
    - name: mpy Tests
      run: MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 --via-mpy -d basics float
      working-directory: tests
    - name: Docs
      run: sphinx-build -E -W -b html . _build/html
    - name: Translations
      run: make check-translate
    - name: New boards check
      run: python3 -u ci_new_boards_check.py
      working-directory: tools

  build-arm:
    runs-on: ubuntu-16.04
    needs: test
    strategy:
      fail-fast: false
      matrix:
        board:
        - "arduino_mkr1300"
        - "arduino_mkrzero"
        - "arduino_zero"
        - "bast_pro_mini_m0"
        - "capablerobot_usbhub"
        - "catwan_usbstick"
        - "circuitplayground_bluefruit"
        - "circuitplayground_express"
        - "circuitplayground_express_crickit"
        - "cp32-m4"
        - "datalore_ip_m4"
        - "datum_distance"
        - "datum_imu"
        - "datum_light"
        - "datum_weather"
        - "electronut_labs_blip"
        - "electronut_labs_papyr"
        - "escornabot_makech"
        - "feather_m0_adalogger"
        - "feather_m0_basic"
        - "feather_m0_express"
        - "feather_m0_express_crickit"
        - "feather_m0_rfm69"
        - "feather_m0_rfm9x"
        - "feather_m0_supersized"
        - "feather_m4_express"
        - "feather_nrf52840_express"
        - "feather_radiofruit_zigbee"
        - "gemma_m0"
        - "grandcentral_m4_express"
        - "hallowing_m0_express"
        - "itsybitsy_m0_express"
        - "itsybitsy_m4_express"
        - "kicksat-sprite"
        - "makerdiary_nrf52840_mdk"
        - "makerdiary_nrf52840_mdk_usb_dongle"
        - "meowmeow"
        - "metro_m0_express"
        - "metro_m4_airlift_lite"
        - "metro_m4_express"
        - "metro_nrf52840_express"
        - "mini_sam_m4"
        - "monster_m4sk"
        - "particle_argon"
        - "particle_boron"
        - "particle_xenon"
        - "pca10056"
        - "pca10059"
        - "pewpew10"
        - "pirkey_m0"
        - "pybadge"
        - "pybadge_airlift"
        - "pygamer"
        - "pygamer_advance"
        - "pyportal"
        - "pyportal_titano"
        - "pyruler"
        - "robohatmm1_m0"
        - "robohatmm1_m4"
        - "sam32"
        - "snekboard"
        - "sparkfun_lumidrive"
        - "sparkfun_nrf52840_mini"
        - "sparkfun_redboard_turbo"
        - "sparkfun_samd21_dev"
        - "sparkfun_samd21_mini"
        - "trellis_m4_express"
        - "trinket_m0"
        - "trinket_m0_haxpress"
        - "uchip"
        - "ugame10"

    steps:
    - name: Set up Python 3.5
      uses: actions/setup-python@v1
      with:
        python-version: 3.5
    - name: Install deps
      run: |
        sudo apt-get install -y gettext
        pip install requests sh click setuptools awscli
        wget https://s3.amazonaws.com/adafruit-circuit-python/gcc-arm-embedded_7-2018q2-1~xenial1_amd64.deb && sudo dpkg -i gcc-arm-embedded*_amd64.deb
    - name: Versions
      run: |
        gcc --version
        arm-none-eabi-gcc --version
        python3 --version
    - uses: actions/checkout@v1
      with:
        submodules: true
    - name: mpy-cross
      run: make -C mpy-cross -j2
    - name: build
      run: python3 -u build_release_files.py
      working-directory: tools
      env:
        BOARDS: ${{ matrix.board }}
    - uses: actions/upload-artifact@v1.0.0
      with:
        name: ${{ matrix.board }}
        path: bin/${{ matrix.board }}
    - name: Upload to S3
      run: aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1
      env:
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      if: github.event_name == 'push' || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
    - name: Install upload deps
      run: |
        pip install uritemplate
    - name: Upload to Release
      run: python3 -u upload_release_files.py
      working-directory: tools
      env:
        UPLOAD_URL: ${{ github.event.release.upload_url }}
        ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
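Note: because `on: [push, pull_request, release]` fires for every release action, the first step of each job cancels the run unless the release was actually published (or a check was rerequested). The truth table encoded by those `if:` expressions, restated as plain Python purely for illustration (the event and action names are the ones used in the workflow above):

def should_run(event_name, action=None):
    # Mirrors the "Fail if not a release publish" guard in build.yml above.
    if event_name != "release":
        return True  # push and pull_request events always run
    # Release events proceed only when published or rerequested.
    return action in ("published", "rerequested")

assert should_run("push")
assert should_run("release", "published")
assert not should_run("release", "created")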
.github/workflows/create_website_pr.yml (vendored, new file, 38 lines)

@@ -0,0 +1,38 @@
name: Update CircuitPython.org

on: release

jobs:
  website:
    runs-on: ubuntu-latest
    steps:
    - name: Dump GitHub context
      env:
        GITHUB_CONTEXT: ${{ toJson(github) }}
      run: echo "$GITHUB_CONTEXT"
    - name: Fail if not a release publish # workaround has `on` doesn't have this filter
      run: exit 1
      if: github.event.action != 'published'
    - name: Set up Python 3.5
      uses: actions/setup-python@v1
      with:
        python-version: 3.5
    - name: Install deps
      run: |
        pip install requests sh click
    - name: Versions
      run: |
        gcc --version
        python3 --version
    - uses: actions/checkout@v1
      with:
        submodules: true
    - name: CircuitPython version
      run: git describe --dirty --always --tags
    - name: Website
      run: python3 build_board_info.py
      working-directory: tools
      env:
        RELEASE_TAG: ${{ github.event.release.tag_name }}
        ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
      if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
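Note: the Website step passes everything `build_board_info.py` needs through environment variables, so it can be imitated locally without a real release event. A hypothetical dry run (the variable names match the `env:` block above; the tag value is invented):

import os
import subprocess

env = dict(os.environ)
env["RELEASE_TAG"] = "5.0.0-alpha.0"            # invented tag, for illustration only
env["ADABOT_GITHUB_ACCESS_TOKEN"] = "<token>"   # supplied by a repository secret in CI

subprocess.run(["python3", "build_board_info.py"], cwd="tools", env=env, check=True)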
.travis.yml (deleted, 161 lines)

@@ -1,161 +0,0 @@
sudo: required
dist: xenial
language: c
compiler:
  - gcc
git:
  depth: 6

# Each item under 'env' is a separate Travis job to execute.
# They run in separate environments, so each one must take the time
# to clone the repository and submodules; to download and install SDKs,
# pip packages, and so forth. By gathering activities together in optimal
# ways, the "run time" and "total time" of the travis jobs can be minimized.
#
# Builds are organized so some will complete quickly, and others are of
# approximately equal size. Try not to freeze out other Travis users.
#
# Board names are in alphabetical order for easy finding, but grouped by
# Adafruit/modified-Adafruit and Other. Ideally they'd be separated into
# separate jobs, but there are too many.
#
# When adding new boards, take a look on the travis CI page
# https://travis-ci.org/adafruit/circuitpython to which build that installs
# that SDK is shortest and add it there. In the case of major re-organizations,
# just try to make the builds "about equal in run time"
env:
  # Non-board tasks
  - TRAVIS_TESTS="unix docs translations website" TRAVIS_BOARDS=""
  # Adafruit and Nordic nRF boards
  - TRAVIS_BOARDS="circuitplayground_bluefruit feather_nrf52840_express metro_nrf52840_express pca10056 pca10059" TRAVIS_SDK=arm
  # Other nRF boards
  - TRAVIS_BOARDS="electronut_labs_blip electronut_labs_papyr makerdiary_nrf52840_mdk makerdiary_nrf52840_mdk_usb_dongle particle_argon particle_boron particle_xenon sparkfun_nrf52840_mini" TRAVIS_SDK=arm
  # Adafruit and modified Adafruit SAMD21 (M0) + Other SAMD21 (M0)
  - TRAVIS_BOARDS="circuitplayground_express circuitplayground_express_crickit feather_m0_adalogger feather_m0_basic feather_m0_express feather_m0_express_crickit feather_m0_rfm69 feather_m0_rfm9x feather_m0_supersized feather_radiofruit_zigbee gemma_m0 hallowing_m0_express " TRAVIS_SDK=arm
  - TRAVIS_BOARDS="itsybitsy_m0_express metro_m0_express pirkey_m0 pyruler trinket_m0 trinket_m0_haxpress arduino_mkr1300 arduino_mkrzero arduino_zero bast_pro_mini_m0 catwan_usbstick datum_distance datum_imu datum_weather" TRAVIS_SDK=arm
  - TRAVIS_BOARDS="escornabot_makech meowmeow pewpew10 robohatmm1_m0 snekboard sparkfun_lumidrive sparkfun_redboard_turbo sparkfun_samd21_dev sparkfun_samd21_mini uchip ugame10" TRAVIS_SDK=arm
  # Adafruit SAMD51 (M4) + Other SAMD51
  - TRAVIS_BOARDS="feather_m4_express grandcentral_m4_express itsybitsy_m4_express metro_m4_airlift_lite metro_m4_express pybadge pybadge_airlift" TRAVIS_SDK=arm
  - TRAVIS_BOARDS="pyportal pyportal_titano trellis_m4_express capablerobot_usbhub cp32-m4 datalore_ip_m4 datum_light" TRAVIS_SDK=arm
  - TRAVIS_BOARDS="pygamer pygamer_advance monster_m4sk kicksat-sprite mini_sam_m4 robohatmm1_m4 sam32" TRAVIS_SDK=arm


addons:
  artifacts:
    paths:
    - $(ls -d1 bin/*/*/* | tr "\n" ":")
    target_paths: /

# Some deploy jobs take over 10 minutes so use this keep alive hack to make sure Travis doesn't kill us.
before_deploy: |
  function keep_alive() {
    while true; do
      echo -en "\a"
      sleep 5
    done
  }
  keep_alive &

deploy:
  provider: releases
  api_key:
    secure: "jdqVFw6itRY4qwQF4ReXo0uaymT+Mob6RhYX0lw8KWFNqBgHnLVuKmKKcGMEuRvBVMPkvxF7bMuOQzSBOunqwlHFse3oMzdWvQODv1zwV7pSRXGwTdIvTPbBjKWxnBG9uSNRf2R5AMflJFUxy2CbtBpgvNzr+4VLREZDrrjEu8C1iTtXGpSm5AQ5iIp2fkMAWD85FP7CQPpkqRoxhSIFZmTdurfYRmenq1OZ/4SeD5TESKcyvqJNzVT9z210B3cg3eAkP6ukvelW4qE2zgIANqUkGqvDEnAvEII9M89kuwhCMAekdfwnUSPrry+g77i1dUZHoRN1+MFj+waYtPaqxdYo2G1sysa6enxlu4jHMR5MfMk9eKHgaNgL3PiyANusYSS44amh8QIiVaX5nw82myZDCpQOZW7YqJKE6WX70Lbs4mS+wIs+ig4KIXO1B0p9kMb0OeVjHRl+KcXsWGRu/ECG/ExpqlVIssSPU407LohMXT2cJ37CY/R/EeK2XSDsQ2M3L3EAGUjCJdBGuwsOJ+2lG+HQpAVu9vAB4kq5jy9Ye+MG+8Xlkly3XZZ5+FkXyYxKnXb26/QVv0e5sIG5OmdJCPYFaH2J1QdKo7CdhEcBtrf6DMPWaimGMldShFqzLjOz3b3qLysRxFF0aGb7ipKPa57vawNzYHoPAViOcXQ="
  file_glob: true
  file: "$TRAVIS_BUILD_DIR/bin/*/*/*"
  skip_cleanup: true
  on:
    tags: true

notifications:
  webhooks:
    urls:
      - https://rosie-ci.ngrok.io/travis
    on_success: always
    on_failure: always
    on_start: always
    on_cancel: always
    on_error: always

before_script:
  # Expand the git tree back to 4.0.0-alpha.1 and then fetch the latest tag.
  - LAST_TAG=`git ls-remote --quiet --tags --sort=version:refname | egrep -o "refs/tags/[0-9]+.*\$" | tail -n 1`
  - git fetch --depth 1 origin $LAST_TAG:$LAST_TAG
  - git describe --dirty --always --tags
  - function var_search () { case "$1" in *$2*) true;; *) false;; esac; }
  - sudo dpkg --add-architecture i386

  - (! var_search "${TRAVIS_SDK-}" arm || (wget https://s3.amazonaws.com/adafruit-circuit-python/gcc-arm-embedded_7-2018q2-1~xenial1_amd64.deb && sudo dpkg -i gcc-arm-embedded*_amd64.deb))

  # For huzzah builds
  - (! var_search "${TRAVIS_SDK-}" esp8266 || (wget https://github.com/jepler/esp-open-sdk/releases/download/2018-06-10/xtensa-lx106-elf-standalone.tar.gz && tar -C .. -xaf xtensa-lx106-elf-standalone.tar.gz))
  - if var_search "${TRAVIS_SDK-}" esp8266 ; then PATH=$(readlink -f ../xtensa-lx106-elf/bin):$PATH; fi

  # For coverage testing (upgrade is used to get latest urllib3 version)
  - sudo apt-get install -y python3-pip
  - pip3 install --user sh click setuptools
  - ([[ -z "$TRAVIS_TESTS" ]] || sudo pip install --upgrade cpp-coveralls)
  - (! var_search "${TRAVIS_TESTS-}" docs || sudo apt-get install -y librsvg2-bin)
  - (! var_search "${TRAVIS_TESTS-}" docs || pip3 install --user Sphinx sphinx-rtd-theme recommonmark sphinxcontrib-svg2pdfconverter)
  - (! var_search "${TRAVIS_TESTS-}" translations || pip3 install --user polib)

  # Check if there's any board missing in TRAVIS_BOARDS
  - cd tools && python3 -u travis_new_boards_check.py
  - cd ..

  # report some good version numbers to the build
  - gcc --version
  - (! var_search "${TRAVIS_SDK-}" arm || arm-none-eabi-gcc --version)
  - (! var_search "${TRAVIS_SDK-}" esp8266 || xtensa-lx106-elf-gcc --version)
  - python3 --version

script:
  # Build mpy-cross first because other builds depend on it.
  - echo 'Building mpy-cross' && echo 'travis_fold:start:mpy-cross'
  - make -C mpy-cross -j2 ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:mpy-cross' && tools/print_status.py status

  # Use unbuffered output because building all the releases can take a long time.
  # Travis will cancel the job if it sees no output for >10 minutes.
  - cd tools && python3 -u build_release_files.py
  - cd ..

  - echo 'Building unix' && echo 'travis_fold:start:unix'
  - (! var_search "${TRAVIS_TESTS-}" unix || (make -C ports/unix deplibs -j2 && make -C ports/unix -j2 && make -C ports/unix coverage -j2)) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:unix' && tools/print_status.py status

  # run tests without coverage info
  #- (cd tests && MICROPY_CPYTHON3=python3.4 ./run-tests -j1)
  #- (cd tests && MICROPY_CPYTHON3=python3.4 ./run-tests -j1 --emit native)

  # run tests with coverage info
  - echo 'Test all' && echo 'travis_fold:start:test_all'
  - (! var_search "${TRAVIS_TESTS-}" unix || (cd tests && MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1)) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:test_all' && tools/print_status.py status

  - echo 'Test threads' && echo 'travis_fold:start:test_threads'
  - (! var_search "${TRAVIS_TESTS-}" unix || (cd tests && MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 -d thread)) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:test_threads' && tools/print_status.py status

  - echo 'Testing with native' && echo 'travis_fold:start:test_native'
  - (! var_search "${TRAVIS_TESTS-}" unix || (cd tests && MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 --emit native)) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:test_native' && tools/print_status.py status

  - (echo 'Testing with mpy' && echo 'travis_fold:start:test_mpy')
  - (! var_search "${TRAVIS_TESTS-}" unix || (cd tests && MICROPY_CPYTHON3=python3.5 MICROPY_MICROPYTHON=../ports/unix/micropython_coverage ./run-tests -j1 --via-mpy -d basics float)) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:test_mpy' && tools/print_status.py status

  - (echo 'Building docs' && echo 'travis_fold:start:build_docs')
  - (! var_search "${TRAVIS_TESTS-}" docs || sphinx-build -E -W -b html . _build/html) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:build_docs' && tools/print_status.py status

  - (echo 'Building translations' && echo 'travis_fold:start:build_translations')
  - (! var_search "${TRAVIS_TESTS-}" translations || make check-translate) ; S=$? ; echo $S > status ; (exit $S)
  - echo 'travis_fold:end:build_translations' && tools/print_status.py status

  # run coveralls coverage analysis (try to, even if some builds/tests failed)
  #- (cd ports/unix && coveralls --root ../.. --build-root . --gcov $(which gcov) --gcov-options '\-o build-coverage/' --include py --include extmod)

  - (! var_search "${TRAVIS_TESTS-}" website || (cd tools && python3 build_board_info.py && cd ..))

after_failure:
  - (cd tests && for exp in *.exp; do testbase=$(basename $exp .exp); echo -e "\nFAILURE $testbase"; diff -u $testbase.exp $testbase.out; done)
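Note: most of the removed `script:` lines gate on `var_search`, the one-line shell helper defined in `before_script:` above. It is nothing more than a substring test over `TRAVIS_TESTS`/`TRAVIS_SDK`; a Python restatement, for reference only:

def var_search(value, needle):
    # Equivalent of: function var_search () { case "$1" in *$2*) true;; *) false;; esac; }
    return needle in value

# The docs steps only ran when TRAVIS_TESTS contained "docs":
assert var_search("unix docs translations website", "docs")
assert not var_search("", "docs")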
tools/build_board_info.py

@@ -88,10 +88,8 @@ def get_version_info():
         # No exact match
         pass
 
-    if "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true":
-        sha = os.environ["TRAVIS_COMMIT"]
-        if os.environ["TRAVIS_PULL_REQUEST"] != "false":
-            sha = os.environ["TRAVIS_PULL_REQUEST_SHA"]
+    if "GITHUB_SHA" in os.environ:
+        sha = os.environ["GITHUB_SHA"]
 
     if not version:
         version="{}-{}".format(date.today().strftime("%Y%m%d"), sha[:7])
@@ -132,7 +130,7 @@ def create_pr(changes, updated, git_info):
         updated_list.append(info)
 
     updated = json.dumps(updated_list, sort_keys=True, indent=4).encode("utf-8") + b"\n"
-    print(updated.decode("utf-8"))
+    #print(updated.decode("utf-8"))
     pr_title = "Automated website update for release {}".format(changes["new_release"])
     boards = ""
     if changes["new_boards"]:
@@ -208,7 +206,7 @@ def generate_download_info():
     boards = {}
     errors = []
 
-    new_tag = os.environ["TRAVIS_TAG"]
+    new_tag = os.environ["RELEASE_TAG"]
 
     changes = {
         "new_release": new_tag,
@@ -240,7 +238,6 @@ def generate_download_info():
 
     board_mapping = get_board_mapping()
 
-    print(previous_releases)
     for release in previous_releases:
         update_downloads(board_mapping, release)
 
@@ -280,7 +277,7 @@ def generate_download_info():
         print("No new release to update")
 
 if __name__ == "__main__":
-    if "TRAVIS_TAG" in os.environ and os.environ["TRAVIS_TAG"]:
+    if "RELEASE_TAG" in os.environ and os.environ["RELEASE_TAG"]:
         generate_download_info()
     else:
         print("skipping website update because this isn't a tag")
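Note: the version-detection change boils down to replacing the Travis commit variables with the single `GITHUB_SHA` that Actions exports. A minimal sketch of the fallback path in `get_version_info()` after this change (the placeholder sha is invented; in the real script it comes from the git output when no exact tag matches):

import os
from datetime import date

sha = "0000000"  # placeholder; normally derived from git describe output
if "GITHUB_SHA" in os.environ:
    sha = os.environ["GITHUB_SHA"]

version = "{}-{}".format(date.today().strftime("%Y%m%d"), sha[:7])
print(version)  # e.g. 20191017-193df55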
tools/build_release_files.py

@@ -10,21 +10,14 @@ import time
 for port in build_info.SUPPORTED_PORTS:
     result = subprocess.run("rm -rf ../ports/{port}/build*".format(port=port), shell=True)
 
-ROSIE_SETUPS = ["rosie-ci"]
-rosie_ok = {}
-for rosie in ROSIE_SETUPS:
-    rosie_ok[rosie] = True
-
 PARALLEL = "-j 5"
-travis = False
-if "TRAVIS" in os.environ and os.environ["TRAVIS"] == "true":
+if "GITHUB_ACTION" in os.environ:
     PARALLEL="-j 2"
-    travis = True
 
 all_boards = build_info.get_board_mapping()
 build_boards = list(all_boards.keys())
-if "TRAVIS_BOARDS" in os.environ:
-    build_boards = os.environ["TRAVIS_BOARDS"].split()
+if "BOARDS" in os.environ:
+    build_boards = os.environ["BOARDS"].split()
 
 sha, version = build_info.get_version_info()
 
@@ -83,25 +76,14 @@ for board in build_boards:
         if exit_status == 0:
             exit_status = 1
 
-        if travis:
-            print('travis_fold:start:adafruit-bins-{}-{}\\r'.format(language, board))
         print("Build {board} for {language}{clean_build} took {build_duration:.2f}s and {success}".format(
             board=board, language=language, clean_build=(" (clean_build)" if clean_build else ""),
             build_duration=build_duration, success=success))
         if make_result.returncode != 0:
-            print(make_result.stdout.decode("utf-8"))
-            print(other_output)
-        # Only upload to Rosie if its a pull request.
-        if travis:
-            for rosie in ROSIE_SETUPS:
-                if not rosie_ok[rosie]:
-                    break
-                print("Uploading to https://{rosie}.ngrok.io/upload/{sha}".format(rosie=rosie, sha=sha))
-                #curl -F "file=@$final_filename" https://$rosie.ngrok.io/upload/$sha
-        if travis:
-            print('travis_fold:end:adafruit-bins-{}-{}\\r'.format(language, board))
-
-        # Flush so travis will see something before 10 minutes has passed.
+            print(make_result.stdout.decode("utf-8"))
+            print(other_output)
+
+        # Flush so we will see something before 10 minutes has passed.
         print(flush=True)
 
 sys.exit(exit_status)
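Note: with the Travis and Rosie plumbing removed, `build_release_files.py` is driven by just two environment hooks: `GITHUB_ACTION` (set on any Actions run) lowers the make parallelism, and `BOARDS` (fed from `matrix.board` in build.yml) narrows the build to a single board. A condensed sketch of that selection logic, under the assumption that the board mapping is a plain dict keyed by board name:

import os

def select_boards(all_boards):
    # Condensed restatement of the env handling shown in the hunk above.
    parallel = "-j 5"
    if "GITHUB_ACTION" in os.environ:
        parallel = "-j 2"  # GitHub-hosted runners get fewer make jobs
    boards = list(all_boards)
    if "BOARDS" in os.environ:
        boards = os.environ["BOARDS"].split()  # e.g. BOARDS="feather_m4_express"
    return parallel, boards

print(select_boards({"feather_m4_express": {}, "pyportal": {}}))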
tools/ci_new_boards_check.py (new file, 50 lines)

@@ -0,0 +1,50 @@
#! /usr/bin/env python3

import sys
import os
import json
import yaml

import build_board_info

workflow_file = '.github/workflows/build.yml'

# Get boards in json format
boards_info_json = build_board_info.get_board_mapping()

# Get all the boards out of the json format
info_boards = [board for board in boards_info_json.keys() if not boards_info_json[board].get("alias", False)]

# We need to know the path of the workflow file
base_path = os.path.dirname(__file__)
yml_path = os.path.abspath(os.path.join(base_path, '..', workflow_file))

# Loading board list based on build jobs in the workflow file.
ci_boards = []
with open(yml_path, "r") as f:
    workflow = yaml.safe_load(f)

ok = True
for job in workflow["jobs"]:
    if not job.startswith("build"):
        continue
    job_boards = workflow["jobs"][job]["strategy"]["matrix"]["board"]
    if job_boards != sorted(job_boards):
        print("Boards for job \"{}\" not sorted. Must be:".format(job))
        print(" - \"" + "\"\n - \"".join(sorted(job_boards)) + "\"")
        ok = False
    ci_boards.extend(job_boards)

# All the travis_boards elements must be on info_boards
info_boards.sort()
ci_boards.sort()

missing_boards = set(info_boards) - set(ci_boards)

if missing_boards:
    print('Boards missing in {}:'.format(workflow_file))
    for board in missing_boards:
        print(board)

if not ok:
    sys.exit(1)
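Note: the new check works because `yaml.safe_load` turns the workflow file into ordinary dicts and lists, so `workflow["jobs"][job]["strategy"]["matrix"]["board"]` is a plain Python list that can be compared against its sorted copy. A small self-contained illustration (the YAML below is a trimmed stand-in for build.yml):

import yaml

snippet = """
jobs:
  build-arm:
    strategy:
      matrix:
        board:
        - "arduino_zero"
        - "feather_m4_express"
"""

workflow = yaml.safe_load(snippet)
boards = workflow["jobs"]["build-arm"]["strategy"]["matrix"]["board"]
assert boards == sorted(boards)
print(boards)  # ['arduino_zero', 'feather_m4_express']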
tools/travis_new_boards_check.py (deleted, 51 lines)

@@ -1,51 +0,0 @@
#! /usr/bin/env python3

import sys
import os
import json

import build_board_info

# Get boards in json format
boards_info_json = build_board_info.get_board_mapping()

# Get all the boards out of the json format
info_boards = [board for board in boards_info_json.keys() if not boards_info_json[board].get("alias", False)]

# We need to know the path of the .travis.yml file
base_path = os.path.dirname(__file__)
travis_path = os.path.abspath(os.path.join(base_path, '..', '.travis.yml'))

# Loading board list based on TRAVIS_BOARDS env variable on .travis.yml
travis_boards = []
with open(travis_path, 'r') as travis:

    # Get all lines that contain the substring 'TRAVIS_BOARDS'
    for line in travis:

        # Get the lines with TRAVIS_BOARDS= in it
        if line.find('TRAVIS_BOARDS=') is not -1:

            # Store all the boards names into travis_boards
            begin_of_names = line.find('TRAVIS_BOARDS=') + len('TRAVIS_BOARDS=') + 1
            end_of_names = line.rfind('"')
            boards = line[begin_of_names:end_of_names]
            boards = boards.split(' ')
            travis_boards.extend(boards)

        # We've reached the end of the env: section
        elif 'addons' in line:
            break
        else:
            pass

# All the travis_boards elements must be on info_boards
info_boards.sort()
travis_boards.sort()

missing_boards = set(info_boards) - set(travis_boards)

if missing_boards:
    print('Boards missing in TRAVIS_BOARDS:')
    for board in missing_boards:
        print(board)
    sys.exit(1)
tools/upload_release_files.py (new executable file, 32 lines)

@@ -0,0 +1,32 @@
#! /usr/bin/env python3

import os
import os.path
import sys
import uritemplate

sys.path.append("adabot")
import adabot.github_requests as github

exit_status = 0

for dirpath, dirnames, filenames in os.walk("../bin"):
    if not filenames:
        continue
    for filename in filenames:
        full_filename = os.path.join(dirpath, filename)
        label = filename.replace("adafruit-circuitpython-", "")
        url_vars = {}
        url_vars["name"] = filename
        url_vars["label"] = label
        url = uritemplate.expand(os.environ["UPLOAD_URL"], url_vars)
        headers = {"content-type": "application/octet-stream"}
        print(url)
        with open(full_filename, "rb") as f:
            response = github.post(url, data=f, headers=headers)
        if not response.ok:
            print("Upload of {} failed with {}.".format(filename, response.status_code))
            print(response.text)
            sys.exit(response.status_code)

sys.exit(exit_status)
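Note: the `UPLOAD_URL` handed in from build.yml is GitHub's RFC 6570 URI template for release assets (it ends in `{?name,label}`), which is why the script expands it with `uritemplate` instead of concatenating strings. A quick illustration with an invented release id:

import uritemplate

# Template in the shape GitHub returns for a release; the id 1234 is invented.
upload_url = "https://uploads.github.com/repos/adafruit/circuitpython/releases/1234/assets{?name,label}"
url = uritemplate.expand(upload_url, {
    "name": "adafruit-circuitpython-pyportal-en_US-5.0.0.uf2",
    "label": "pyportal-en_US-5.0.0.uf2",
})
print(url)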