Merge branch 'adafruit:main' into lilygo_t_display_rp2040

commit 22665a113d
Eric Rong, 2023-05-12 22:02:16 -07:00 (committed by GitHub)
2655 changed files with 110702 additions and 38968 deletions


@ -0,0 +1,7 @@
#define MICROPY_HW_BOARD_NAME "BLOK"
USB_PRODUCT = "BLOK"
uint32_t THI = (*(uint32_t *)FUSES_HOT_TEMP_VAL_INT_ADDR & FUSES_HOT_TEMP_VAL_INT_Msk) >> FUSES_HOT_TEMP_VAL_INT_Pos;
float TH = THI + convert_dec_to_frac(THD);
print(binascii.b2a_base64(b"fo"))
# again, neither will "there" or "wither", since they have "the"
i1Qb$TE"rl


@ -0,0 +1,22 @@
ans
ure
clen
ser
endianess
pris
synopsys
reenable
dout
inout
wel
iput
hsi
astroid
busses
cyphertext
dum
deque
deques
extint
shs
pass-thru

.codespellrc (new file)

@ -0,0 +1,10 @@
# See: https://github.com/codespell-project/codespell#using-a-config-file
[codespell]
# In the event of a false positive, add the problematic word, in all lowercase, to 'ignore-words.txt' (one word per line).
# Or copy & paste the whole problematic line to 'exclude-file.txt'
ignore-words = .codespell/ignore-words.txt
exclude-file = .codespell/exclude-file.txt
check-filenames =
check-hidden =
count =
skip = .cproject,.git,./lib,./locale,ACKNOWLEDGEMENTS
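
With this config in place, a local check should need nothing more than the bare command, since codespell picks up .codespellrc from the working directory (a minimal sketch, assuming codespell is installed via pip):

    pip install codespell
    codespell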

.devcontainer/Readme.md (new file)

@ -0,0 +1,31 @@
Build CircuitPython in a GitHub Devcontainer
=============================================
To build CircuitPython within a GitHub devcontainer, you need to perform
the following steps.
1. Check out the code in a devcontainer
- click on the green "<> Code"-button
- select the Codespaces-tab
- choose "+ new with options..." from the "..."-menu
- in the following screen select the branch and then
- select ".devcontainer/cortex-m/devcontainer.json" instead
of "Default Codespaces configuration"
- update region as necessary
- finally, click on the green "Create codespace" button
2. Your codespace is created. Cloning the images is quite fast, but
preparing it for CircuitPython development takes about 10 minutes.
Note that this is a one-time task.
3. During creation, you can run the command
`tail -f /workspaces/.codespaces/.persistedshare/creation.log`
to see what is going on.
4. To actually build CircuitPython, run
cd ports/raspberrypi
make -j $(nproc) BOARD=whatever TRANSLATION=xx_XX
This takes about 2m40s.
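
For example, with a concrete board id (the one used in the commented example
at the end of on-create.sh) and a German translation, this might look like:

    cd ports/raspberrypi
    make -j $(nproc) BOARD=pimoroni_tufty2040 TRANSLATION=de_DE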


@ -0,0 +1,23 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/universal
{
"name": "CircuitPython Cortex-M Build-Environment (base: Default Linux Universal)",
"image": "mcr.microsoft.com/devcontainers/universal:2-linux",
"postCreateCommand": ".devcontainer/cortex-m/on-create.sh",
"remoteEnv": { "PATH": "/workspaces/gcc-arm-none-eabi/bin:${containerEnv:PATH}" }
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "uname -a",
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}


@ -0,0 +1,59 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# on-create.sh: postCreateCommand-hook for devcontainer.json (Cortex-M build)
#
# Author: Bernhard Bablok
#
# -----------------------------------------------------------------------------
echo -e "[on-create.sh] downloading and installing gcc-arm-non-eabi toolchain"
cd /workspaces
wget -qO gcc-arm-none-eabi.tar.bz2 https://adafru.it/Pid
tar -xjf gcc-arm-none-eabi.tar.bz2
ln -s gcc-arm-none-eabi-10-2020-q4-major gcc-arm-none-eabi
rm -f /workspaces/gcc-arm-none-eabi.tar.bz2
export PATH=/workspaces/gcc-arm-none-eabi/bin:$PATH
# add repository and install tools
echo -e "[on-create.sh] adding pybricks/ppa"
sudo add-apt-repository -y ppa:pybricks/ppa
echo -e "[on-create.sh] installing uncrustify and mtools"
sudo apt-get -y install uncrustify mtools
# dosfstools >= 4.2 needed, standard repo only has 4.1
echo -e "[on-create.sh] downloading and installing dosfstools"
wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz
tar -xzf dosfstools-4.2.tar.gz
cd dosfstools-4.2/
./configure
make -j $(nproc)
sudo make install
cd /workspaces
rm -fr /workspaces/dosfstools-4.2 /workspaces/dosfstools-4.2.tar.gz
# prepare source-code tree
cd /workspaces/circuitpython/
echo -e "[on-create.sh] fetching submodules"
make fetch-submodules
echo -e "[on-create.sh] fetching tags"
git fetch --tags --recurse-submodules=no --shallow-since="2021-07-01" https://github.com/adafruit/circuitpython HEAD
# additional python requirements
echo -e "[on-create.sh] pip-installing requirements"
pip install --upgrade -r requirements-dev.txt
pip install --upgrade -r requirements-doc.txt
# add pre-commit
echo -e "[on-create.sh] installing pre-commit"
pre-commit install
# create cross-compiler
echo -e "[on-create.sh] building mpy-cross"
make -j $(nproc) -C mpy-cross # time: about 36 sec
# that's it!
echo -e "[on-create.sh] setup complete"
#commands to actually build CP:
#cd ports/raspberrypi
#time make -j $(nproc) BOARD=pimoroni_tufty2040 TRANSLATION=de_DE


@ -6,7 +6,7 @@ body:
  - type: markdown
    attributes:
      value: >-
-       Thanks! for testing out CircuitPython. Now that you have encountered a
+       Thanks for testing out CircuitPython! Now that you have encountered a
        bug... you can file a report for it.
  - type: textarea
    id: firmware


@ -0,0 +1,63 @@
name: Fetch external deps
inputs:
action:
required: false
default: restore
type: choice
options:
- cache
- restore
port:
required: false
default: none
type: string
runs:
using: composite
steps:
# arm
- name: Get arm toolchain
if: >-
inputs.port != 'none' &&
inputs.port != 'litex' &&
inputs.port != 'espressif'
uses: carlosperate/arm-none-eabi-gcc-action@v1
with:
release: '10-2020-q4'
# espressif
- name: Get espressif toolchain
if: inputs.port == 'espressif'
run: |
sudo apt-get update
sudo apt-get install -y ninja-build
shell: bash
- name: Install IDF tools
if: inputs.port == 'espressif'
run: |
echo "Installing ESP-IDF tools"
$IDF_PATH/tools/idf_tools.py --non-interactive install required
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
echo "Installing Python environment and packages"
$IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
rm -rf $IDF_TOOLS_PATH/dist
shell: bash
- name: Set environment
if: inputs.port == 'espressif'
run: |
source $IDF_PATH/export.sh
echo >> $GITHUB_ENV "IDF_PYTHON_ENV_PATH=$IDF_PYTHON_ENV_PATH"
echo >> $GITHUB_PATH "$PATH"
shell: bash
# common
- name: Cache python dependencies
if: inputs.port != 'espressif'
uses: ./.github/actions/deps/python
with:
action: ${{ inputs.action }}
- name: Install python dependencies
run: pip install -r requirements-dev.txt
shell: bash

.github/actions/deps/ports/action.yml (new file)

@ -0,0 +1,36 @@
name: Fetch port deps
inputs:
board:
required: true
type: string
outputs:
port:
value: ${{ steps.board-to-port.outputs.port }}
runs:
using: composite
steps:
- name: Board to port
id: board-to-port
run: |
PORT=$(find ports/*/boards/ -type d -name ${{ inputs.board }} | sed 's/^ports\///g;s/\/boards.*//g')
if [ -z $PORT ]; then (exit 1); else echo >> $GITHUB_OUTPUT "port=$PORT"; fi
shell: bash
- name: Set up broadcom
if: steps.board-to-port.outputs.port == 'broadcom'
uses: ./.github/actions/deps/ports/broadcom
- name: Set up espressif
if: steps.board-to-port.outputs.port == 'espressif'
uses: ./.github/actions/deps/ports/espressif
- name: Set up litex
if: steps.board-to-port.outputs.port == 'litex'
uses: ./.github/actions/deps/ports/litex
- name: Set up nrf
if: steps.board-to-port.outputs.port == 'nrf'
uses: ./.github/actions/deps/ports/nrf
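
To illustrate what the "Board to port" step above computes: the find/sed pipeline strips the "ports/" prefix and the "/boards/..." suffix from the board's directory, leaving just the port name. For a board id that appears elsewhere in this changeset, the result would look roughly like:

    $ find ports/*/boards/ -type d -name adafruit_feather_rp2040 | sed 's/^ports\///g;s/\/boards.*//g'
    raspberrypi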


@ -0,0 +1,22 @@
name: Fetch broadcom port deps
runs:
using: composite
steps:
- name: Get broadcom toolchain
run: |
wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
sudo tar -C /usr --strip-components=1 -xaf gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
sudo apt-get update
sudo apt-get install -y mtools
shell: bash
- name: Install mkfs.fat
run: |
wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz
tar -xaf dosfstools-4.2.tar.gz
cd dosfstools-4.2
./configure
make -j 2
cd src
echo >> $GITHUB_PATH $(pwd)
shell: bash


@ -0,0 +1,36 @@
name: Fetch espressif port deps
runs:
using: composite
steps:
- name: Set IDF env
run: |
echo >> $GITHUB_ENV "IDF_PATH=$GITHUB_WORKSPACE/ports/espressif/esp-idf"
echo >> $GITHUB_ENV "IDF_TOOLS_PATH=$GITHUB_WORKSPACE/.idf_tools"
shell: bash
- name: Get IDF commit
id: idf-commit
run: |
COMMIT=$(git submodule status ports/espressif/esp-idf | grep -o -P '(?<=^-).*(?= )')
echo "$COMMIT"
echo "commit=$COMMIT" >> $GITHUB_OUTPUT
shell: bash
- name: Cache IDF submodules
uses: actions/cache@v3
with:
path: |
.git/modules/ports/espressif/esp-idf
ports/espressif/esp-idf
key: submodules-idf-${{ steps.idf-commit.outputs.commit }}
- name: Cache IDF tools
uses: actions/cache@v3
with:
path: ${{ env.IDF_TOOLS_PATH }}
key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-idf-${{ steps.idf-commit.outputs.commit }}
- name: Initialize IDF submodules
run: git submodule update --init --depth=1 --recursive $IDF_PATH
shell: bash
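
A short sketch of what the "Get IDF commit" grep does: because the checkout leaves submodules uninitialized, `git submodule status` prefixes the line with "-", and the lookbehind/lookahead pattern keeps only the bare commit hash that becomes the cache key (the hash below is illustrative):

    $ git submodule status ports/espressif/esp-idf
    -8b30a1a9a ports/espressif/esp-idf
    $ git submodule status ports/espressif/esp-idf | grep -o -P '(?<=^-).*(?= )'
    8b30a1a9a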


@ -0,0 +1,10 @@
name: Fetch litex port deps
runs:
using: composite
steps:
- name: Get litex toolchain
run: |
wget https://static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
sudo tar -C /usr --strip-components=1 -xaf riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
shell: bash


@ -0,0 +1,17 @@
name: Fetch nrf port deps
runs:
using: composite
steps:
- name: Get nrfutil 7+
run: |
wget https://developer.nordicsemi.com/.pc-tools/nrfutil/x64-linux/nrfutil
chmod +x nrfutil
./nrfutil install nrf5sdk-tools
mkdir -p $HOME/.local/bin
mv nrfutil $HOME/.local/bin
echo "$HOME/.local/bin" >> $GITHUB_PATH
shell: bash
- name: Print nrfutil version
run: nrfutil -V
shell: bash

.github/actions/deps/python/action.yml (new file)

@ -0,0 +1,42 @@
name: Fetch python deps
inputs:
action:
description: The cache action to use
required: false
default: restore
type: choice
options:
- cache
- restore
runs:
using: composite
steps:
- name: Cache python dependencies
id: cache-python-deps
if: inputs.action == 'cache'
uses: actions/cache@v3
with:
path: .cp_tools
key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-cp-${{ hashFiles('requirements-dev.txt') }}
- name: Restore python dependencies
id: restore-python-deps
if: inputs.action == 'restore'
uses: actions/cache/restore@v3
with:
path: .cp_tools
key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-cp-${{ hashFiles('requirements-dev.txt') }}
- name: Set up venv
if: inputs.action == 'cache' && !steps.cache-python-deps.outputs.cache-hit
run: python -m venv .cp_tools
shell: bash
- name: Activate venv
if: inputs.action == 'cache' || (inputs.action == 'restore' && steps.restore-python-deps.outputs.cache-hit)
run: |
source .cp_tools/bin/activate
echo >> $GITHUB_PATH "$PATH"
shell: bash


@ -0,0 +1,87 @@
name: 'Fetch Submodules'
inputs:
target:
description: 'The target for ci_fetch_deps'
required: false
type: string
submodules:
description: 'The submodules to cache'
required: false
default: '["extmod/ulab", "lib/", "tools/"]'
type: string
action:
description: 'The cache action to use'
required: false
default: 'restore'
type: choice
options:
- cache
- restore
version:
description: 'Whether to generate CP version'
required: false
default: false
type: boolean
outputs:
frozen:
description: 'Whether frozen submodules were fetched'
value: ${{ steps.cp-deps.outputs.frozen_tags }}
version:
description: 'The CP version'
value: ${{ steps.cp-version.outputs.cp-version }}
runs:
using: "composite"
steps:
- name: Create submodule status
id: create-submodule-status
run: |
git submodule status ${{ join(fromJSON(inputs.submodules), ' ') }} >> submodule_status
echo $(cut -d ' ' -f 2 submodule_status) | echo "submodules=[\"$(sed "s/ /\", \"/g")\"]" >> $GITHUB_OUTPUT
shell: bash
- name: Cache submodules
if: ${{ inputs.action == 'cache' }}
uses: actions/cache@v3
with:
path: ".git/modules/\n${{ join(fromJSON(steps.create-submodule-status.outputs.submodules), '\n') }}"
key: submodules-common-${{ hashFiles('submodule_status') }}
enableCrossOsArchive: true
- name: Restore submodules
if: ${{ inputs.action == 'restore' }}
uses: actions/cache/restore@v3
with:
path: ".git/modules/\n${{ join(fromJSON(steps.create-submodule-status.outputs.submodules), '\n') }}"
key: submodules-common-${{ hashFiles('submodule_status') }}
enableCrossOsArchive: true
- name: Remove submodule status
run: rm submodule_status
shell: bash
- name: CircuitPython dependencies
id: cp-deps
run: python tools/ci_fetch_deps.py ${{ inputs.target || matrix.board || github.job }}
shell: bash
- name: CircuitPython version
id: cp-version
if: ${{ inputs.version == 'true' }}
run: |
echo "::group::Fetch history and tags"
git fetch --no-recurse-submodules --shallow-since="2021-07-01" --tags https://github.com/adafruit/circuitpython HEAD
git fetch --no-recurse-submodules --shallow-since="2021-07-01" origin $GITHUB_SHA
git repack -d
echo "::endgroup::"
CP_VERSION=$(tools/describe)
echo "$CP_VERSION"
echo "CP_VERSION=$CP_VERSION" >> $GITHUB_ENV
echo "cp-version=$CP_VERSION" >> $GITHUB_OUTPUT
shell: bash
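
For reference, the "Create submodule status" step turns the paths reported by `git submodule status` into a JSON array that is then reused to build the cache path list; with the default inputs the resulting output value looks roughly like this (paths illustrative):

    submodules=["extmod/ulab", "lib/tinyusb", "tools/huffman"]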

.github/actions/mpy_cross/action.yml (new file)

@ -0,0 +1,37 @@
name: Set up mpy-cross
inputs:
download:
required: false
default: true
type: boolean
runs:
using: composite
steps:
- name: Download mpy-cross
id: download-mpy-cross
if: inputs.download == 'true'
continue-on-error: true
uses: actions/download-artifact@v3
with:
name: mpy-cross
path: mpy-cross
- name: Make mpy-cross executable
if: inputs.download == 'true' && steps.download-mpy-cross.outcome == 'success'
run: sudo chmod +x mpy-cross/mpy-cross
shell: bash
- name: Build mpy-cross
if: inputs.download == 'false' || steps.download-mpy-cross.outcome == 'failure'
run: make -C mpy-cross -j2
shell: bash
- name: Upload mpy-cross
if: inputs.download == 'false' || steps.download-mpy-cross.outcome == 'failure'
continue-on-error: true
uses: actions/upload-artifact@v3
with:
name: mpy-cross
path: mpy-cross/mpy-cross

.github/actions/upload_aws/action.yml (new file)

@ -0,0 +1,33 @@
name: Upload to AWS S3
inputs:
source:
required: true
type: string
destination:
required: false
type: string
AWS_ACCESS_KEY_ID:
required: true
AWS_SECRET_ACCESS_KEY:
required: true
runs:
using: composite
steps:
- name: Upload to S3
if: >-
(github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') ||
(github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
run: >-
[ -z "$AWS_ACCESS_KEY_ID" ] ||
aws s3 cp ${{ inputs.source }} s3://adafruit-circuit-python/bin/${{ inputs.destination }}
${{ endsWith(inputs.source, '/') && '--recursive' || '' }} --no-progress --region us-east-1
env:
AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ inputs.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ inputs.AWS_SECRET_ACCESS_KEY }}
shell: bash

.github/workflows/build-boards.yml (new file)

@ -0,0 +1,84 @@
name: Build boards
on:
workflow_call:
inputs:
boards:
required: true
type: string
cp-version:
required: true
type: string
secrets:
AWS_ACCESS_KEY_ID:
required: false
AWS_SECRET_ACCESS_KEY:
required: false
jobs:
board:
runs-on: ubuntu-22.04
env:
CP_VERSION: ${{ inputs.cp-version }}
strategy:
fail-fast: false
matrix:
board: ${{ fromJSON(inputs.boards) }}
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
submodules: false
fetch-depth: 1
- name: Set up python
uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Set up port
id: set-up-port
uses: ./.github/actions/deps/ports
with:
board: ${{ matrix.board }}
- name: Set up submodules
id: set-up-submodules
uses: ./.github/actions/deps/submodules
- name: Set up external
uses: ./.github/actions/deps/external
with:
port: ${{ steps.set-up-port.outputs.port }}
- name: Set up mpy-cross
if: steps.set-up-submodules.outputs.frozen == 'True'
uses: ./.github/actions/mpy_cross
- name: Versions
run: |
gcc --version
python3 --version
cmake --version || true
ninja --version || true
aarch64-none-elf-gcc --version || true
arm-none-eabi-gcc --version || true
xtensa-esp32-elf-gcc --version || true
riscv32-esp-elf-gcc --version || true
riscv64-unknown-elf-gcc --version || true
mkfs.fat --version || true
- name: Set up build failure matcher
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
- name: Build board
run: python3 -u build_release_files.py
working-directory: tools
env:
BOARDS: ${{ matrix.board }}
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.board }}
path: bin/${{ matrix.board }}
- name: Upload to S3
uses: ./.github/actions/upload_aws
with:
source: bin/
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}

.github/workflows/build-mpy-cross.yml (new file)

@ -0,0 +1,70 @@
name: Build mpy-cross
on:
workflow_call:
inputs:
cp-version:
required: true
type: string
secrets:
AWS_ACCESS_KEY_ID:
required: false
AWS_SECRET_ACCESS_KEY:
required: false
jobs:
build:
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
mpy-cross: ["static", "static-aarch64", "static-mingw", "static-raspbian"]
env:
CP_VERSION: ${{ inputs.cp-version }}
EX_static-mingw: static.exe
OS_static: linux-amd64
OS_static-aarch64: linux-aarch64
OS_static-mingw: windows
OS_static-raspbian: linux-raspbian
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
submodules: false
fetch-depth: 1
- name: Set up python
uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Set up submodules
uses: ./.github/actions/deps/submodules
with:
target: mpy-cross
- name: Install toolchain (aarch64)
if: matrix.mpy-cross == 'static-aarch64'
run: sudo apt-get install -y gcc-aarch64-linux-gnu
- name: Install toolchain (mingw)
if: matrix.mpy-cross == 'static-mingw'
run: sudo apt-get install -y mingw-w64
- name: Build mpy-cross.${{ matrix.mpy-cross }}
run: make -C mpy-cross -j2 -f Makefile.${{ matrix.mpy-cross }}
- name: Set output
run: |
echo >> $GITHUB_ENV "EX=${{ env[format('EX_{0}', matrix.mpy-cross)] || matrix.mpy-cross }}"
echo >> $GITHUB_ENV "OS=${{ env[format('OS_{0}', matrix.mpy-cross)] }}"
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: mpy-cross.${{ env.EX }}
path: mpy-cross/mpy-cross.${{ env.EX }}
- name: Upload to S3
uses: ./.github/actions/upload_aws
with:
source: mpy-cross/mpy-cross.${{ env.EX }}
destination: mpy-cross/${{ env.OS }}/mpy-cross-${{ env.OS }}-${{ env.CP_VERSION }}.${{ env.EX }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
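
The EX_*/OS_* environment indirection above just resolves a per-variant file extension and platform name. For the static-mingw matrix entry, for instance, the lookups resolve as follows:

    # matrix.mpy-cross = static-mingw
    EX=static.exe   # env[format('EX_{0}', ...)] -> EX_static-mingw
    OS=windows      # env[format('OS_{0}', ...)] -> OS_static-mingw
    # artifact: mpy-cross.static.exe
    # S3 key:   mpy-cross/windows/mpy-cross-windows-${CP_VERSION}.static.exe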


@ -9,164 +9,114 @@ on:
pull_request: pull_request:
release: release:
types: [published] types: [published]
check_suite:
types: [rerequested]
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true cancel-in-progress: true
jobs: jobs:
test: scheduler:
runs-on: ubuntu-20.04 runs-on: ubuntu-22.04
outputs: outputs:
build-doc: ${{ steps.set-matrix.outputs.build-doc }} docs: ${{ steps.set-matrix.outputs.docs }}
boards-arm: ${{ steps.set-matrix.outputs.boards-arm }} ports: ${{ steps.set-matrix.outputs.ports }}
boards-riscv: ${{ steps.set-matrix.outputs.boards-riscv }} windows: ${{ steps.set-matrix.outputs.windows }}
boards-espressif: ${{ steps.set-matrix.outputs.boards-espressif }} cp-version: ${{ steps.set-up-submodules.outputs.version }}
boards-aarch: ${{ steps.set-matrix.outputs.boards-aarch }}
steps: steps:
- name: Dump GitHub context - name: Dump GitHub context
run: echo "$GITHUB_CONTEXT"
env: env:
GITHUB_CONTEXT: ${{ toJson(github) }} GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT" - name: Set up repository
- uses: actions/checkout@v2.2.0 uses: actions/checkout@v3
with: with:
submodules: false submodules: false
fetch-depth: 1 fetch-depth: 1
- name: Set up Python 3 - name: Set up python
uses: actions/setup-python@v2 uses: actions/setup-python@v4
with: with:
python-version: "3.x" python-version: 3.x
- name: Get CP deps - name: Duplicate USB VID/PID check
run: python tools/ci_fetch_deps.py test ${{ github.sha }}
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- name: Install dependencies
run: |
sudo apt-get update
sudo apt-get install -y eatmydata
sudo eatmydata apt-get install -y gettext gcc-aarch64-linux-gnu mingw-w64
pip install -r requirements-ci.txt -r requirements-dev.txt
- name: Versions
run: |
gcc --version
python3 --version
- name: Duplicate USB VID/PID Check
run: python3 -u -m tools.ci_check_duplicate_usb_vid_pid run: python3 -u -m tools.ci_check_duplicate_usb_vid_pid
- name: Build mpy-cross - name: Set up submodules
run: make -C mpy-cross -j2 id: set-up-submodules
- name: Build unix port uses: ./.github/actions/deps/submodules
run: |
make -C ports/unix VARIANT=coverage -j2
- name: Test all
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1
working-directory: tests
- name: Print failure info
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --print-failures
if: failure()
working-directory: tests
- name: Native Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --emit native
working-directory: tests
- name: mpy Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --via-mpy -d basics float micropython
working-directory: tests
- name: Native mpy Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --via-mpy --emit native -d basics float micropython
working-directory: tests
- name: Build native modules
run: |
make -C examples/natmod/features1
make -C examples/natmod/features2
make -C examples/natmod/btree
make -C examples/natmod/framebuf
make -C examples/natmod/uheapq
make -C examples/natmod/urandom
make -C examples/natmod/ure
make -C examples/natmod/uzlib
- name: Test native modules
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-natmodtests.py extmod/{btree*,framebuf*,uheapq*,ure*,uzlib*}.py
working-directory: tests
- name: Build mpy-cross.static-aarch64
run: make -C mpy-cross -j2 -f Makefile.static-aarch64
- uses: actions/upload-artifact@v2
with: with:
name: mpy-cross.static-aarch64 action: cache
path: mpy-cross/mpy-cross.static-aarch64 version: true
- name: Build mpy-cross.static-raspbian - name: Set up external
run: make -C mpy-cross -j2 -f Makefile.static-raspbian uses: ./.github/actions/deps/external
- uses: actions/upload-artifact@v2
with: with:
name: mpy-cross.static-raspbian action: cache
path: mpy-cross/mpy-cross.static-raspbian - name: Set up mpy-cross
- name: Build mpy-cross.static uses: ./.github/actions/mpy_cross
run: make -C mpy-cross -j2 -f Makefile.static
- uses: actions/upload-artifact@v2
with: with:
name: mpy-cross.static-amd64-linux download: false
path: mpy-cross/mpy-cross.static - name: Get last commit with checks
- name: Build mpy-cross.static-mingw id: get-last-commit-with-checks
run: make -C mpy-cross -j2 -f Makefile.static-mingw
- uses: actions/upload-artifact@v2
with:
name: mpy-cross.static-x64-windows
path: mpy-cross/mpy-cross.static.exe
- name: Upload mpy-cross builds to S3
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
env:
AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-aarch64 s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-aarch64-${{ env.CP_VERSION }} --no-progress --region us-east-1
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-raspbian s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-raspbian-${{ env.CP_VERSION }} --no-progress --region us-east-1
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-amd64-linux-${{ env.CP_VERSION }} --no-progress --region us-east-1
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static.exe s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-x64-windows-${{ env.CP_VERSION }}.exe --no-progress --region us-east-1
- name: "Get changes"
if: github.event_name == 'pull_request' if: github.event_name == 'pull_request'
uses: dorny/paths-filter@v2 working-directory: tools
id: filter run: python3 -u ci_changes_per_commit.py
with: env:
list-files: json REPO: ${{ github.repository }}
filters: | PULL: ${{ github.event.number }}
changed: GITHUB_TOKEN: ${{ github.token }}
- '**' EXCLUDE_COMMIT: ${{ github.event.pull_request.head.sha }}
- name: "Set matrix" - name: Set head sha (pull)
if: github.event_name == 'pull_request'
run: echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV
- name: Set base sha (pull)
if: github.event_name == 'pull_request'
run: git cat-file -e $SHA && echo "BASE_SHA=$SHA" >> $GITHUB_ENV || true
env:
SHA: ${{ steps.get-last-commit-with-checks.outputs.commit_sha || github.event.pull_request.base.sha }}
- name: Set head sha (push)
if: github.event_name == 'push'
run: echo "HEAD_SHA=${{ github.event.after }}" >> $GITHUB_ENV
- name: Set base sha (push)
if: github.event_name == 'push'
run: git cat-file -e $SHA && echo "BASE_SHA=$SHA" >> $GITHUB_ENV || true
env:
SHA: ${{ github.event.before }}
- name: Set matrix
id: set-matrix id: set-matrix
run: python3 -u ci_set_matrix.py
working-directory: tools working-directory: tools
env: env:
CHANGED_FILES: ${{ steps.filter.outputs.changed_files }} LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.check_runs }}
run: python3 -u ci_set_matrix.py
tests:
needs: scheduler
uses: ./.github/workflows/run-tests.yml
with:
cp-version: ${{ needs.scheduler.outputs.cp-version }}
mpy-cross:
needs: scheduler
if: needs.scheduler.outputs.ports != '{}'
uses: ./.github/workflows/build-mpy-cross.yml
secrets: inherit
with:
cp-version: ${{ needs.scheduler.outputs.cp-version }}
mpy-cross-mac: mpy-cross-mac:
runs-on: macos-10.15 runs-on: macos-11
steps: needs: scheduler
- name: Dump GitHub context if: needs.scheduler.outputs.ports != '{}'
env: env:
GITHUB_CONTEXT: ${{ toJson(github) }} CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
run: echo "$GITHUB_CONTEXT" steps:
- uses: actions/checkout@v2.2.0 - name: Set up repository
uses: actions/checkout@v3
with: with:
submodules: false submodules: false
fetch-depth: 1 fetch-depth: 1
- name: Set up Python 3 - name: Set up python
uses: actions/setup-python@v2 uses: actions/setup-python@v4
with: with:
python-version: "3.x" python-version: 3.x
- name: Get CP deps - name: Set up submodules
run: python tools/ci_fetch_deps.py mpy-cross-mac ${{ github.sha }} uses: ./.github/actions/deps/submodules
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- name: Install dependencies
run: |
brew install gettext
echo >>$GITHUB_PATH /usr/local/opt/gettext/bin
- name: Versions - name: Versions
run: | run: |
gcc --version gcc --version
@ -174,353 +124,172 @@ jobs:
msgfmt --version msgfmt --version
- name: Build mpy-cross - name: Build mpy-cross
run: make -C mpy-cross -j2 run: make -C mpy-cross -j2
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v3
with: with:
name: mpy-cross-macos-catalina name: mpy-cross-macos-11-x64
path: mpy-cross/mpy-cross path: mpy-cross/mpy-cross
- name: Select SDK for M1 build
run: sudo xcode-select -switch /Applications/Xcode_12.3.app
- name: Build mpy-cross (arm64) - name: Build mpy-cross (arm64)
run: make -C mpy-cross -j2 -f Makefile.m1 V=2 run: make -C mpy-cross -j2 -f Makefile.m1 V=2
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v3
with: with:
name: mpy-cross-macos-bigsur-arm64 name: mpy-cross-macos-11-arm64
path: mpy-cross/mpy-cross-arm64 path: mpy-cross/mpy-cross-arm64
- name: Make universal binary - name: Make universal binary
run: lipo -create -output mpy-cross-macos-universal mpy-cross/mpy-cross mpy-cross/mpy-cross-arm64 run: lipo -create -output mpy-cross-macos-universal mpy-cross/mpy-cross mpy-cross/mpy-cross-arm64
- uses: actions/upload-artifact@v2 - name: Upload artifact
uses: actions/upload-artifact@v3
with: with:
name: mpy-cross-macos-universal name: mpy-cross-macos-11-universal
path: mpy-cross-macos-universal path: mpy-cross-macos-universal
- name: Upload mpy-cross build to S3 - name: Upload to S3
if: >-
(github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') ||
(github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
run: | run: |
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross-macos-universal s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-universal-${{ env.CP_VERSION }} --no-progress --region us-east-1 [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross-macos-universal s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-universal --no-progress --region us-east-1
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross-arm64 s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-bigsur-${{ env.CP_VERSION }}-arm64 --no-progress --region us-east-1 [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross-arm64 s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-arm64 --no-progress --region us-east-1
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-catalina-${{ env.CP_VERSION }} --no-progress --region us-east-1 [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-x64 --no-progress --region us-east-1
env: env:
AWS_PAGER: '' AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
docs:
build-doc: runs-on: ubuntu-22.04
runs-on: ubuntu-20.04 needs: scheduler
needs: test if: needs.scheduler.outputs.docs == 'True'
if: ${{ needs.test.outputs.build-doc == 'True' }} env:
CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
steps: steps:
- uses: actions/checkout@v2.2.0 - name: Set up repository
uses: actions/checkout@v3
with: with:
submodules: false submodules: false
fetch-depth: 1 fetch-depth: 1
- name: Get CP deps - name: Set up python
run: python tools/ci_fetch_deps.py docs ${{ github.sha }} uses: actions/setup-python@v4
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- name: Set up Python 3
uses: actions/setup-python@v2
with: with:
python-version: "3.x" python-version: 3.x
- name: Set up submodules
uses: ./.github/actions/deps/submodules
- name: Install dependencies - name: Install dependencies
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y eatmydata sudo apt-get install -y latexmk librsvg2-bin texlive-fonts-recommended texlive-latex-recommended texlive-latex-extra
sudo eatmydata apt-get install -y latexmk librsvg2-bin texlive-fonts-recommended texlive-latex-recommended texlive-latex-extra pip install -r requirements-doc.txt
pip install -r requirements-ci.txt -r requirements-doc.txt
- name: Build and Validate Stubs - name: Build and Validate Stubs
run: make check-stubs -j2 run: make check-stubs -j2
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v3
with: with:
name: stubs name: stubs
path: circuitpython-stubs/dist/* path: circuitpython-stubs/dist/*
- name: Test Documentation Build (HTML) - name: Test Documentation Build (HTML)
run: sphinx-build -E -W -b html -D version=${{ env.CP_VERSION }} -D release=${{ env.CP_VERSION }} . _build/html run: sphinx-build -E -W -b html -D version=${{ env.CP_VERSION }} -D release=${{ env.CP_VERSION }} . _build/html
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v3
with: with:
name: docs name: docs
path: _build/html path: _build/html
- name: Test Documentation Build (LaTeX/PDF) - name: Test Documentation Build (LaTeX/PDF)
run: | run: |
make latexpdf make latexpdf
- uses: actions/upload-artifact@v2 - uses: actions/upload-artifact@v3
with: with:
name: docs name: docs
path: _build/latex path: _build/latex
- name: Upload stubs to S3 - name: Upload to S3
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')) uses: ./.github/actions/upload_aws
env: with:
AWS_PAGER: '' source: circuitpython-stubs/dist/*.tar.gz
destination: stubs/circuitpython-stubs-${{ env.CP_VERSION }}.tar.gz
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
run: |
zip -9r circuitpython-stubs.zip circuitpython-stubs
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp circuitpython-stubs/dist/*.tar.gz s3://adafruit-circuit-python/bin/stubs/circuitpython-stubs-${{ env.CP_VERSION }}.zip --no-progress --region us-east-1
- name: Upload stubs to PyPi - name: Upload stubs to PyPi
if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested') if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
env: env:
TWINE_USERNAME: ${{ secrets.pypi_username }} TWINE_USERNAME: ${{ secrets.pypi_username }}
TWINE_PASSWORD: ${{ secrets.pypi_password }} TWINE_PASSWORD: ${{ secrets.pypi_password }}
run: | run: |
# setup.py sdist was run by 'make stubs' # python -m build was run by 'make stubs'
[ -z "$TWINE_USERNAME" ] || echo "Uploading dev release to PyPi" [ -z "$TWINE_USERNAME" ] || echo "Uploading dev release to PyPi"
[ -z "$TWINE_USERNAME" ] || twine upload circuitpython-stubs/dist/* [ -z "$TWINE_USERNAME" ] || twine upload circuitpython-stubs/dist/*
windows:
build-arm: runs-on: windows-2022
runs-on: ubuntu-20.04 needs: scheduler
needs: test if: needs.scheduler.outputs.windows == 'True'
strategy: env:
fail-fast: false CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
matrix: defaults:
board: ${{ fromJSON(needs.test.outputs.boards-arm) }} run:
if: ${{ needs.test.outputs.boards-arm != '[]' }} # We define a custom shell script here, although `msys2.cmd` does neither exist nor is it available in the PATH yet
shell: msys2 {0}
steps: steps:
- name: Set up Python 3 # We want to change the configuration of the git command that actions/checkout will be using
uses: actions/setup-python@v2 # (since it is not possible to set autocrlf through the action yet, see actions/checkout#226).
with: - run: git config --global core.autocrlf input
python-version: "3.x"
- uses: actions/checkout@v2.2.0
with:
submodules: false
fetch-depth: 1
- name: Get CP deps
run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
- name: Install dependencies
run: |
sudo apt-get install -y gettext
pip install -r requirements-ci.txt -r requirements-dev.txt
wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
sudo tar -C /usr --strip-components=1 -xaf gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
- name: Versions
run: |
gcc --version
arm-none-eabi-gcc --version
python3 --version
- name: mpy-cross
run: make -C mpy-cross -j2
- name: Setup build failure matcher
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
- name: build
run: python3 -u build_release_files.py
working-directory: tools
env:
BOARDS: ${{ matrix.board }}
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.board }}
path: bin/${{ matrix.board }}
- name: Upload to S3
run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
env:
AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
build-riscv:
runs-on: ubuntu-20.04
needs: test
strategy:
fail-fast: false
matrix:
board: ${{ fromJSON(needs.test.outputs.boards-riscv) }}
if: ${{ needs.test.outputs.boards-riscv != '[]' }}
steps:
- name: Set up Python 3
uses: actions/setup-python@v2
with:
python-version: "3.x"
- uses: actions/checkout@v2.2.0
with:
submodules: false
fetch-depth: 1
- name: Get CP deps
run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
- name: Install dependencies
run: |
sudo apt-get install -y gettext
pip install -r requirements-ci.txt -r requirements-dev.txt
wget https://static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
sudo tar -C /usr --strip-components=1 -xaf riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
- name: Versions
run: |
gcc --version
riscv64-unknown-elf-gcc --version
python3 --version
- name: mpy-cross
run: make -C mpy-cross -j2
- name: Setup build failure matcher
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
- name: build
run: python3 -u build_release_files.py
working-directory: tools
env:
BOARDS: ${{ matrix.board }}
- uses: actions/upload-artifact@v2
with:
name: ${{ matrix.board }}
path: bin/${{ matrix.board }}
- name: Upload to S3
run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
env:
AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
build-espressif:
runs-on: ubuntu-20.04
needs: test
strategy:
fail-fast: false
matrix:
board: ${{ fromJSON(needs.test.outputs.boards-espressif) }}
if: ${{ needs.test.outputs.boards-espressif != '[]' }}
steps:
- name: Set up Python 3
uses: actions/setup-python@v2
with:
python-version: "3.x"
- uses: actions/checkout@v2.2.0
with:
submodules: false
fetch-depth: 1
- name: Get CP deps
run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- uses: actions/cache@v2
name: Fetch IDF tool cache
id: idf-cache
with:
path: ${{ github.workspace }}/.idf_tools
key: ${{ runner.os }}-idf-tools-${{ hashFiles('.git/modules/ports/espressif/esp-idf/HEAD') }}-20210923
- name: Clone IDF submodules
run: |
(cd $IDF_PATH && git submodule update --init)
env:
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
- name: Install IDF tools
run: |
$IDF_PATH/tools/idf_tools.py --non-interactive install required
$IDF_PATH/tools/idf_tools.py --non-interactive install cmake
$IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
rm -rf $IDF_TOOLS_PATH/dist
env:
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
- name: Install dependencies
run: |
source $IDF_PATH/export.sh
sudo apt-get install -y gettext ninja-build
pip install -r requirements-ci.txt -r requirements-dev.txt
env:
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
- name: Versions
run: |
source $IDF_PATH/export.sh
gcc --version
xtensa-esp32s2-elf-gcc --version
python3 --version
ninja --version
cmake --version
shell: bash shell: bash
env: - name: Check python coding (cmd)
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf run: python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools shell: cmd
- name: mpy-cross # We use a JS Action, which calls the system terminal or other custom terminals directly, if required
run: make -C mpy-cross -j2 - uses: msys2/setup-msys2@v2
- name: Setup build failure matcher with:
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json" install: base-devel git wget unzip gcc python-pip
- name: build # The goal of this was to test how things worked when the default file encoding (locale.getpreferedencoding())
# was not UTF-8. However, msys2 python does use utf-8 as the preferred file encoding, and using actions/setup-python
# python3.8 gave a broken build, so we're not really testing what we wanted to test.
# However, commandline length limits are being tested so that does some good.
- name: Check python coding (msys2)
run: | run: |
source $IDF_PATH/export.sh locale -v
python3 -u build_release_files.py which python; python --version
working-directory: tools python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
shell: bash which python3; python3 --version
env: python3 -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf - name: Install dependencies
IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools run: |
BOARDS: ${{ matrix.board }} wget --no-verbose -O gcc-arm.zip https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip
- uses: actions/upload-artifact@v2 unzip -q -d /tmp gcc-arm.zip
with: tar -C /tmp/gcc-arm-none-* -cf - . | tar -C /usr/local -xf -
name: ${{ matrix.board }} pip install wheel
path: bin/${{ matrix.board }} # requirements_dev.txt doesn't install on windows. (with msys2 python)
- name: Upload to S3 # instead, pick a subset for what we want to do
run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1" pip install cascadetoml jinja2 typer click intelhex
env: # check that installed packages work....?
AWS_PAGER: '' which python; python --version; python -c "import cascadetoml"
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} which python3; python3 --version; python3 -c "import cascadetoml"
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - name: Set up repository
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')) uses: actions/checkout@v3
build-aarch:
runs-on: ubuntu-20.04
needs: test
strategy:
fail-fast: false
matrix:
board: ${{ fromJSON(needs.test.outputs.boards-aarch) }}
if: ${{ needs.test.outputs.boards-aarch != '[]' }}
steps:
- name: Set up Python 3
uses: actions/setup-python@v2
with:
python-version: "3.x"
- uses: actions/checkout@v2.2.0
with: with:
submodules: false submodules: false
fetch-depth: 1 fetch-depth: 1
- name: Get CP deps - name: Set up submodules
run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }} uses: ./.github/actions/deps/submodules
- name: Install dependencies - name: build mpy-cross
run: | run: make -j2 -C mpy-cross
sudo apt-get install -y gettext mtools - name: build rp2040
pip install -r requirements-ci.txt -r requirements-dev.txt run: make -j2 -C ports/raspberrypi BOARD=adafruit_feather_rp2040 TRANSLATION=de_DE
wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz - name: build samd21
sudo tar -C /usr --strip-components=1 -xaf gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz run: make -j2 -C ports/atmel-samd BOARD=feather_m0_express TRANSLATION=zh_Latn_pinyin
wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2 - name: build samd51
sudo tar -C /usr --strip-components=1 -xaf gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2 run: make -j2 -C ports/atmel-samd BOARD=feather_m4_express TRANSLATION=es
- name: Install mkfs.fat - name: build nrf
run: | run: make -j2 -C ports/nrf BOARD=feather_nrf52840_express TRANSLATION=fr
wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz - name: build stm
tar -xaf dosfstools-4.2.tar.gz run: make -j2 -C ports/stm BOARD=feather_stm32f405_express TRANSLATION=pt_BR
cd dosfstools-4.2 # I gave up trying to do esp builds on windows when I saw
./configure # ERROR: Platform MINGW64_NT-10.0-17763-x86_64 appears to be unsupported
make -j 2 # https://github.com/espressif/esp-idf/issues/7062
cd src
echo >>$GITHUB_PATH $(pwd) ports:
- name: Versions needs: [scheduler, mpy-cross, tests]
run: | if: needs.scheduler.outputs.ports != '{}'
gcc --version uses: ./.github/workflows/build-boards.yml
aarch64-none-elf-gcc --version secrets: inherit
arm-none-eabi-gcc --version strategy:
python3 --version fail-fast: false
mkfs.fat --version || true matrix:
- name: mpy-cross port: ${{ fromJSON(needs.scheduler.outputs.ports).ports }}
run: make -C mpy-cross -j2
- name: Setup build failure matcher
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
- name: build
run: python3 -u build_release_files.py
working-directory: tools
env:
BOARDS: ${{ matrix.board }}
- uses: actions/upload-artifact@v2
with: with:
name: ${{ matrix.board }} boards: ${{ toJSON(fromJSON(needs.scheduler.outputs.ports)[matrix.port]) }}
path: bin/${{ matrix.board }} cp-version: ${{ needs.scheduler.outputs.cp-version }}
- name: Upload to S3
run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
env:
AWS_PAGER: ''
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))


@ -10,37 +10,34 @@ on:
jobs: jobs:
website: website:
runs-on: ubuntu-20.04 runs-on: ubuntu-22.04
steps: steps:
- name: Dump GitHub context - name: Dump GitHub context
run: echo "$GITHUB_CONTEXT"
env: env:
GITHUB_CONTEXT: ${{ toJson(github) }} GITHUB_CONTEXT: ${{ toJson(github) }}
run: echo "$GITHUB_CONTEXT" - name: Set up repository
- uses: actions/checkout@v2.2.0 uses: actions/checkout@v3
with: with:
submodules: false submodules: false
fetch-depth: 1 fetch-depth: 1
- name: Set up Python 3 - name: Set up python
uses: actions/setup-python@v2 uses: actions/setup-python@v4
with: with:
python-version: "3.x" python-version: 3.x
- name: Get CP deps - name: Set up submodules
run: python tools/ci_fetch_deps.py website ${{ github.sha }} uses: ./.github/actions/deps/submodules
- name: Install deps with:
run: | version: true
pip install -r requirements-dev.txt - name: Set up external
uses: ./.github/actions/deps/external
- name: Versions - name: Versions
run: | run: |
gcc --version gcc --version
python3 --version python3 --version
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- name: Website - name: Website
run: python3 build_board_info.py run: python3 build_board_info.py
working-directory: tools working-directory: tools
env: env:
RELEASE_TAG: ${{ github.event.release.tag_name }} RELEASE_TAG: ${{ github.event.release.tag_name }}
ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.BLINKA_GITHUB_ACCESS_TOKEN }} ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')


@ -0,0 +1,89 @@
name: Custom board build
on:
workflow_dispatch:
inputs:
board:
description: 'Board: Found in ports/*/boards/[board_id]'
required: true
type: string
version:
description: 'Version: Can be a tag or a commit (>=8.1.0)'
required: false
default: latest
type: string
language:
description: 'Language: Found in locale/[language].po'
required: false
default: en_US
type: string
flags:
description: 'Flags: Build flags (e.g. CIRCUITPY_WIFI=1)'
required: false
type: string
debug:
description: 'Make a debug build'
required: false
default: false
type: boolean
run-name: ${{ inputs.board }}-${{ inputs.language }}-${{ inputs.version }}${{ inputs.flags != '' && '-custom' || '' }}${{ inputs.debug && '-debug' || '' }}
jobs:
build:
runs-on: ubuntu-22.04
steps:
- name: Set up repository
run: |
git clone --filter=tree:0 https://github.com/adafruit/circuitpython.git $GITHUB_WORKSPACE
git checkout ${{ inputs.version == 'latest' && 'HEAD' || inputs.version }}
- name: Set up identifier
if: inputs.debug || inputs.flags != ''
run: |
> custom-build && git add custom-build
- name: Set up python
uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Set up port
id: set-up-port
uses: ./.github/actions/deps/ports
with:
board: ${{ inputs.board }}
- name: Set up submodules
id: set-up-submodules
uses: ./.github/actions/deps/submodules
with:
action: cache
target: ${{ inputs.board }}
- name: Set up external
uses: ./.github/actions/deps/external
with:
action: cache
port: ${{ steps.set-up-port.outputs.port }}
- name: Set up mpy-cross
if: steps.set-up-submodules.outputs.frozen == 'True'
uses: ./.github/actions/mpy_cross
with:
download: false
- name: Versions
run: |
tools/describe
gcc --version
python3 --version
cmake --version || true
ninja --version || true
aarch64-none-elf-gcc --version || true
arm-none-eabi-gcc --version || true
xtensa-esp32-elf-gcc --version || true
riscv32-esp-elf-gcc --version || true
riscv64-unknown-elf-gcc --version || true
mkfs.fat --version || true
- name: Build board
run: make -j2 ${{ inputs.flags }} BOARD=${{ inputs.board }} DEBUG=${{ inputs.debug && '1' || '0' }} TRANSLATION=${{ inputs.language }}
working-directory: ports/${{ steps.set-up-port.outputs.port }}
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: ${{ inputs.board }}-${{ inputs.language }}-${{ inputs.version }}${{ inputs.flags != '' && '-custom' || '' }}${{ inputs.debug && '-debug' || '' }}
path: ports/${{ steps.set-up-port.outputs.port }}/build-${{ inputs.board }}/firmware.*
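
A workflow_dispatch run like this can also be started from the command line. A sketch using the GitHub CLI (the workflow file name and board id here are illustrative, not taken from this diff):

    gh workflow run build-board.yml \
      -f board=adafruit_feather_rp2040 \
      -f language=en_US \
      -f version=latest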


@ -0,0 +1,18 @@
name: Notify users based on issue labels
on:
issues:
types: [labeled]
jobs:
notify:
runs-on: ubuntu-latest
permissions:
issues: write
steps:
- uses: tekktrik/issue-labeled-ping@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
user: v923z
label: ulab
message: Heads up {user} - the "{label}" label was applied to this issue.


@ -1,14 +0,0 @@
name: Notify users based on issue labels
on:
issues:
types: [labeled]
jobs:
notify:
runs-on: ubuntu-latest
steps:
- uses: jenschelkopf/issue-label-notification-action@1.3
with:
recipients: |
ulab=@v923z


@ -1,110 +0,0 @@
name: windows port
on:
push:
pull_request:
paths:
- '.github/workflows/*.yml'
- 'tools/**'
- 'py/**'
- 'extmod/**'
- 'lib/**'
- 'ports/unix/**'
- 'ports/windows/**'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
build:
runs-on: windows-2019
defaults:
run:
# We define a custom shell script here, although `msys2.cmd` does neither exist nor is it available in the PATH yet
shell: msys2 {0}
steps:
# We want to change the configuration of the git command that actions/checkout will be using (since it is not possible to set autocrlf through the action yet, see actions/checkout#226).
- run: git config --global core.autocrlf input
shell: bash
- name: Check python coding (cmd)
run: |
python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
shell: cmd
# We use a JS Action, which calls the system terminal or other custom terminals directly, if required
- uses: msys2/setup-msys2@v2
with:
update: true
install: base-devel git wget unzip gcc python-pip
# The goal of this was to test how things worked when the default file
# encoding (locale.getpreferredencoding()) was not UTF-8. However, msys2
# python does use utf-8 as the preferred file encoding, and using
# actions/setup-python python3.8 gave a broken build, so we're not really
# testing what we wanted to test.
#
# however, commandline length limits are being tested so that does some
# good.
- name: Check python coding (msys2)
run: |
locale -v
which python; python --version
python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
which python3; python3 --version
python3 -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
- name: Install dependencies
run: |
wget --no-verbose -O gcc-arm.zip https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip
unzip -q -d /tmp gcc-arm.zip
tar -C /tmp/gcc-arm-none-* -cf - . | tar -C /usr/local -xf -
pip install wheel
# requirements_dev.txt doesn't install on windows. (with msys2 python)
# instead, pick a subset for what we want to do
pip install cascadetoml jinja2 typer intelhex
# check that installed packages work....?
which python; python --version; python -c "import cascadetoml"
which python3; python3 --version; python3 -c "import cascadetoml"
- uses: actions/checkout@v2.2.0
with:
submodules: false
fetch-depth: 1
- name: Get CP deps
run: python tools/ci_fetch_deps.py windows ${{ github.sha }}
- name: CircuitPython version
run: |
tools/describe || git log --parents HEAD~4..
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
- name: build mpy-cross
run: make -j2 -C mpy-cross
- name: build rp2040
run: make -j2 -C ports/raspberrypi BOARD=adafruit_feather_rp2040 TRANSLATION=de_DE
- name: build samd21
run: make -j2 -C ports/atmel-samd BOARD=feather_m0_express TRANSLATION=zh_Latn_pinyin
- name: build samd51
run: make -j2 -C ports/atmel-samd BOARD=feather_m4_express TRANSLATION=es
- name: build nrf
run: make -j2 -C ports/nrf BOARD=feather_nrf52840_express TRANSLATION=fr
- name: build stm
run: make -j2 -C ports/stm BOARD=feather_stm32f405_express TRANSLATION=pt_BR
# I gave up trying to do esp32 builds on windows when I saw
# ERROR: Platform MINGW64_NT-10.0-17763-x86_64 appears to be unsupported
# https://github.com/espressif/esp-idf/issues/7062
#
# - name: prepare esp
# run: ports/espressif/esp-idf/install.bat
# shell: cmd
#
# - name: build esp
# run: . ports/espressif/esp-idf/export.sh && make -j2 -C ports/espressif BOARD=adafruit_metro_esp32s2


@ -5,8 +5,8 @@
name: pre-commit name: pre-commit
on: on:
pull_request:
push: push:
pull_request:
concurrency: concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
@ -14,33 +14,33 @@ concurrency:
jobs: jobs:
pre-commit: pre-commit:
runs-on: ubuntu-20.04 runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v2.2.0 - name: Set up repository
- name: Set up Python 3 uses: actions/checkout@v3
uses: actions/setup-python@v2
with: with:
python-version: "3.x" submodules: false
- name: Install deps fetch-depth: 1
- name: Set up python
uses: actions/setup-python@v4
with:
python-version: 3.x
- name: Set up submodules
uses: ./.github/actions/deps/submodules
- name: Set up external
uses: ./.github/actions/deps/external
- name: Install dependencies
run: | run: |
sudo apt-add-repository -y -u ppa:pybricks/ppa sudo apt-get update
sudo apt-get install -y gettext uncrustify sudo apt-get install -y gettext uncrustify
pip3 install black polib pyyaml - name: Run pre-commit
- name: Populate selected submodules uses: pre-commit/action@v3.0.0
run: git submodule update --init extmod/ulab
- name: Set PY
run: echo >>$GITHUB_ENV PY="$(python -c 'import hashlib, sys;print(hashlib.sha256(sys.version.encode()+sys.executable.encode()).hexdigest())')"
- uses: actions/cache@v2
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
- uses: pre-commit/action@v1.1.0
- name: Make patch - name: Make patch
if: failure() if: failure()
run: git diff > ~/pre-commit.patch run: git diff > ~/pre-commit.patch
- name: Upload patch - name: Upload patch
if: failure() if: failure()
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v3
with: with:
name: patch name: patch
path: ~/pre-commit.patch path: ~/pre-commit.patch

.github/workflows/run-tests.yml (new file)

@ -0,0 +1,67 @@
name: Run tests
on:
workflow_call:
inputs:
cp-version:
required: true
type: string
jobs:
run:
runs-on: ubuntu-22.04
strategy:
fail-fast: false
matrix:
test: [all, mpy, native, native_mpy]
env:
CP_VERSION: ${{ inputs.cp-version }}
MICROPY_CPYTHON3: python3.8
MICROPY_MICROPYTHON: ../ports/unix/micropython-coverage
TEST_all:
TEST_mpy: --via-mpy -d basics float micropython
TEST_native: --emit native
TEST_native_mpy: --via-mpy --emit native -d basics float micropython
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
submodules: false
fetch-depth: 1
- name: Set up python
uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Set up submodules
uses: ./.github/actions/deps/submodules
with:
target: tests
- name: Set up external
if: matrix.test == 'all'
uses: ./.github/actions/deps/external
- name: Set up mpy-cross
uses: ./.github/actions/mpy_cross
- name: Build unix port
run: make -C ports/unix VARIANT=coverage -j2
- name: Run tests
run: ./run-tests.py -j2 ${{ env[format('TEST_{0}', matrix.test)] }}
working-directory: tests
- name: Print failure info
run: ./run-tests.py -j2 --print-failures
if: failure()
working-directory: tests
- name: Build native modules
if: matrix.test == 'all'
run: |
make -C examples/natmod/features1
make -C examples/natmod/features2
make -C examples/natmod/btree
make -C examples/natmod/framebuf
make -C examples/natmod/uheapq
make -C examples/natmod/urandom
make -C examples/natmod/ure
make -C examples/natmod/uzlib
- name: Test native modules
if: matrix.test == 'all'
run: ./run-natmodtests.py extmod/{btree*,framebuf*,uheapq*,ure*,uzlib*}.py
working-directory: tests

.gitignore

@ -9,6 +9,7 @@
!atmel-samd/asf/**/*.a !atmel-samd/asf/**/*.a
*.elf *.elf
*.bin *.bin
!*.toml.bin
*.map *.map
*.hex *.hex
*.dis *.dis

.gitmodules

@ -33,9 +33,6 @@
path = ports/atmel-samd/asf4 path = ports/atmel-samd/asf4
url = https://github.com/adafruit/asf4.git url = https://github.com/adafruit/asf4.git
branch = circuitpython branch = circuitpython
[submodule "tools/usb_descriptor"]
path = tools/usb_descriptor
url = https://github.com/adafruit/usb_descriptor.git
[submodule "lib/nrfutil"] [submodule "lib/nrfutil"]
path = lib/nrfutil path = lib/nrfutil
url = https://github.com/adafruit/nRF52_nrfutil url = https://github.com/adafruit/nRF52_nrfutil
@ -103,7 +100,7 @@
url = https://github.com/adafruit/Adafruit_MP3 url = https://github.com/adafruit/Adafruit_MP3
[submodule "ports/mimxrt10xx/sdk"] [submodule "ports/mimxrt10xx/sdk"]
path = ports/mimxrt10xx/sdk path = ports/mimxrt10xx/sdk
url = https://github.com/adafruit/MIMXRT10xx_SDK url = https://github.com/nxp-mcuxpresso/mcux-sdk.git
[submodule "frozen/Adafruit_CircuitPython_Register"] [submodule "frozen/Adafruit_CircuitPython_Register"]
path = frozen/Adafruit_CircuitPython_Register path = frozen/Adafruit_CircuitPython_Register
url = https://github.com/adafruit/Adafruit_CircuitPython_Register.git url = https://github.com/adafruit/Adafruit_CircuitPython_Register.git
@ -145,10 +142,10 @@
url = https://github.com/adafruit/Adafruit_CircuitPython_RFM69.git url = https://github.com/adafruit/Adafruit_CircuitPython_RFM69.git
[submodule "ports/espressif/esp-idf"] [submodule "ports/espressif/esp-idf"]
path = ports/espressif/esp-idf path = ports/espressif/esp-idf
url = https://github.com/espressif/esp-idf.git url = https://github.com/adafruit/esp-idf.git
branch = release/v4.4 branch = release/v4.4-circuitpython
[submodule "ports/espressif/certificates/nina-fw"] [submodule "ports/espressif/certificates/nina-fw"]
path = ports/espressif/certificates/nina-fw path = lib/certificates/nina-fw
url = https://github.com/adafruit/nina-fw.git url = https://github.com/adafruit/nina-fw.git
[submodule "frozen/Adafruit_CircuitPython_ST7789"] [submodule "frozen/Adafruit_CircuitPython_ST7789"]
path = frozen/Adafruit_CircuitPython_ST7789 path = frozen/Adafruit_CircuitPython_ST7789
@ -187,15 +184,14 @@
[submodule "frozen/Adafruit_CircuitPython_APDS9960"] [submodule "frozen/Adafruit_CircuitPython_APDS9960"]
path = frozen/Adafruit_CircuitPython_APDS9960 path = frozen/Adafruit_CircuitPython_APDS9960
url = https://github.com/adafruit/Adafruit_CircuitPython_APDS9960 url = https://github.com/adafruit/Adafruit_CircuitPython_APDS9960
[submodule "ports/broadcom/peripherals"]
path = ports/broadcom/peripherals
url = https://github.com/adafruit/broadcom-peripherals.git
branch = main-build
[submodule "rpi-firmware"] [submodule "rpi-firmware"]
path = ports/broadcom/firmware path = ports/broadcom/firmware
url = https://github.com/raspberrypi/rpi-firmware.git url = https://github.com/raspberrypi/rpi-firmware.git
branch = master branch = master
shallow = true shallow = true
[submodule "lib/adafruit_floppy"]
path = lib/adafruit_floppy
url = https://github.com/adafruit/Adafruit_Floppy
[submodule "ports/stm/st_driver/cmsis_device_f4"] [submodule "ports/stm/st_driver/cmsis_device_f4"]
path = ports/stm/st_driver/cmsis_device_f4 path = ports/stm/st_driver/cmsis_device_f4
url = https://github.com/STMicroelectronics/cmsis_device_f4.git url = https://github.com/STMicroelectronics/cmsis_device_f4.git
@ -283,3 +279,65 @@
[submodule "frozen/Adafruit_CircuitPython_FakeRequests"] [submodule "frozen/Adafruit_CircuitPython_FakeRequests"]
path = frozen/Adafruit_CircuitPython_FakeRequests path = frozen/Adafruit_CircuitPython_FakeRequests
url = https://github.com/adafruit/Adafruit_CircuitPython_FakeRequests.git url = https://github.com/adafruit/Adafruit_CircuitPython_FakeRequests.git
[submodule "frozen/pew-pewpew-lcd"]
path = frozen/pew-pewpew-lcd
url = https://github.com/pypewpew/pew-pewpew-lcd.git
[submodule "frozen/mixgo_cp_lib"]
path = frozen/mixgo_cp_lib
url = https://github.com/dahanzimin/circuitpython_lib.git
[submodule "frozen/Adafruit_CircuitPython_IS31FL3731"]
path = frozen/Adafruit_CircuitPython_IS31FL3731
url = https://github.com/adafruit/Adafruit_CircuitPython_IS31FL3731.git
[submodule "frozen/Adafruit_CircuitPython_Ticks"]
path = frozen/Adafruit_CircuitPython_Ticks
url = https://github.com/adafruit/Adafruit_CircuitPython_Ticks.git
[submodule "frozen/Adafruit_CircuitPython_asyncio"]
path = frozen/Adafruit_CircuitPython_asyncio
url = https://github.com/adafruit/Adafruit_CircuitPython_asyncio.git
[submodule "frozen/circuitpython_ef_music"]
path = frozen/circuitpython_ef_music
url = https://github.com/elecfreaks/circuitpython_ef_music.git
[submodule "frozen/circuitpython_picoed"]
path = frozen/circuitpython_picoed
url = https://github.com/elecfreaks/circuitpython_picoed.git
[submodule "ports/espressif/esp32-camera"]
path = ports/espressif/esp32-camera
url = https://github.com/adafruit/esp32-camera/
branch = circuitpython
[submodule "ports/raspberrypi/lib/cyw43-driver"]
path = ports/raspberrypi/lib/cyw43-driver
url = https://github.com/georgerobotics/cyw43-driver.git
[submodule "ports/raspberrypi/lib/lwip"]
path = ports/raspberrypi/lib/lwip
url = https://github.com/adafruit/lwip.git
branch = circuitpython8
[submodule "lib/mbedtls"]
path = lib/mbedtls
url = https://github.com/ARMmbed/mbedtls.git
[submodule "frozen/Adafruit_CircuitPython_UC8151D"]
path = frozen/Adafruit_CircuitPython_UC8151D
url = https://github.com/adafruit/Adafruit_CircuitPython_UC8151D
[submodule "frozen/Adafruit_CircuitPython_SSD1680"]
path = frozen/Adafruit_CircuitPython_SSD1680
url = https://github.com/adafruit/Adafruit_CircuitPython_SSD1680
[submodule "ports/broadcom/peripherals"]
path = ports/broadcom/peripherals
url = https://github.com/adafruit/broadcom-peripherals.git
branch = main-build
[submodule "ports/silabs/gecko_sdk"]
path = ports/silabs/gecko_sdk
url = https://github.com/SiliconLabs/gecko_sdk.git
branch = v4.2.1
[submodule "ports/silabs/tools/slc_cli_linux"]
path = ports/silabs/tools/slc_cli_linux
url = https://github.com/SiliconLabs/circuitpython_slc_cli_linux
[submodule "ports/raspberrypi/lib/PicoDVI"]
path = ports/raspberrypi/lib/PicoDVI
url = https://github.com/circuitpython/PicoDVI.git
branch = circuitpython
[submodule "frozen/circuitpython-pcf85063a"]
path = frozen/circuitpython-pcf85063a
url = https://github.com/bablokb/circuitpython-pcf85063a
[submodule "frozen/Adafruit_CircuitPython_Wave"]
path = frozen/Adafruit_CircuitPython_Wave
url = http://github.com/adafruit/Adafruit_CircuitPython_Wave.git


@ -8,9 +8,19 @@ repos:
hooks: hooks:
- id: check-yaml - id: check-yaml
- id: end-of-file-fixer - id: end-of-file-fixer
exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*|ports/espressif/esp-idf-config/.*|ports/espressif/boards/.*/sdkconfig)'
- id: trailing-whitespace
exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*)' exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*)'
- id: trailing-whitespace
exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*|lib/mbedtls_errors/generate_errors.diff)'
- repo: https://github.com/codespell-project/codespell
rev: v2.2.4
hooks:
- id: codespell
args: [-w]
exclude: |
(?x)^(
locale/|
lib/
)
- repo: local - repo: local
hooks: hooks:
- id: translations - id: translations


@ -12,10 +12,9 @@ build:
os: ubuntu-20.04 os: ubuntu-20.04
tools: tools:
python: "3" python: "3"
jobs:
submodules: post_install:
include: - python tools/ci_fetch_deps.py docs
- extmod/ulab
formats: formats:
- pdf - pdf


@ -35,6 +35,8 @@ Failing to install these will prevent from properly building.
pip3 install -r requirements-dev.txt pip3 install -r requirements-dev.txt
If you run into an error installing minify_html, you may need to install `rust`.
### mpy-cross ### mpy-cross
As part of the build process, mpy-cross is needed to compile .py files into .mpy files. As part of the build process, mpy-cross is needed to compile .py files into .mpy files.
@ -70,7 +72,7 @@ The test suite in the top level `tests` directory. It needs the unix port to ru
Then you can run the test suite: Then you can run the test suite:
cd ../../tests cd ../../tests
./run-tests ./run-tests.py
A successful run will say something like A successful run will say something like
@ -107,7 +109,7 @@ Pre-commit also requires some additional programs to be installed through your p
* Standard Unix tools such as make, find, etc * Standard Unix tools such as make, find, etc
* The gettext package, any modern version * The gettext package, any modern version
* uncrustify version 0.71 (0.72 is also tested) * uncrustify version 0.71 (0.72 is also tested and OK; 0.75 is not OK)
Each time you create a git commit, the pre-commit quality checks will be run. You can also run them e.g., with `pre-commit run foo.c` or `pre-commit run --all` to run on all files whether modified or not. Each time you create a git commit, the pre-commit quality checks will be run. You can also run them e.g., with `pre-commit run foo.c` or `pre-commit run --all` to run on all files whether modified or not.


@ -123,7 +123,7 @@ accordingly.
## Attribution ## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org),
version 1.4, available at version 1.4, available at
<https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>, <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>,
and the [Rust Code of Conduct](https://www.rust-lang.org/en-US/conduct.html). and the [Rust Code of Conduct](https://www.rust-lang.org/en-US/conduct.html).

LICENSE

@ -1,6 +1,6 @@
The MIT License (MIT) MIT License
Copyright (c) 2013-2022 Damien P. George Copyright (c) 2013-2022 Damien P. George and others
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal
@ -9,77 +9,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in The above copyright notice and this permission notice shall be included in all
all copies or substantial portions of the Software. copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
THE SOFTWARE. SOFTWARE.
--------------------------------------------------------------------------------
Unless specified otherwise (see below), the above license and copyright applies
to all files in this repository.
Individual files may include additional copyright holders.
The various ports of MicroPython may include third-party software that is
licensed under different terms. These licenses are summarised in the tree
below, please refer to these files and directories for further license and
copyright information. Note that (L)GPL-licensed code listed below is only
used during the build process and is not part of the compiled source code.
/ (MIT)
/drivers
/cc3000 (BSD-3-clause)
/cc3100 (BSD-3-clause)
/wiznet5k (BSD-3-clause)
/lib
/asf4 (Apache-2.0)
/axtls (BSD-3-clause)
/config
/scripts
/config (GPL-2.0-or-later)
/Rules.mak (GPL-2.0)
/berkeley-db-1xx (BSD-4-clause)
/btstack (See btstack/LICENSE)
/cmsis (BSD-3-clause)
/crypto-algorithms (NONE)
/libhydrogen (ISC)
/littlefs (BSD-3-clause)
/lwip (BSD-3-clause)
/mynewt-nimble (Apache-2.0)
/nrfx (BSD-3-clause)
/nxp_driver (BSD-3-Clause)
/oofatfs (BSD-1-clause)
/pico-sdk (BSD-3-clause)
/re15 (BSD-3-clause)
/stm32lib (BSD-3-clause)
/tinytest (BSD-3-clause)
/tinyusb (MIT)
/uzlib (Zlib)
/logo (uses OFL-1.1)
/ports
/cc3200
/hal (BSD-3-clause)
/simplelink (BSD-3-clause)
/FreeRTOS (GPL-2.0 with FreeRTOS exception)
/stm32
/usbd*.c (MCD-ST Liberty SW License Agreement V2)
/stm32_it.* (MIT + BSD-3-clause)
/system_stm32*.c (MIT + BSD-3-clause)
/boards
/startup_stm32*.s (BSD-3-clause)
/*/stm32*.h (BSD-3-clause)
/usbdev (MCD-ST Liberty SW License Agreement V2)
/usbhost (MCD-ST Liberty SW License Agreement V2)
/teensy
/core (PJRC.COM)
/zephyr
/src (Apache-2.0)
/tools
/dfu.py (LGPL-3.0-only)

LICENSE_MicroPython (new file)

@ -0,0 +1,85 @@
The MIT License (MIT)
Copyright (c) 2013-2022 Damien P. George
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
--------------------------------------------------------------------------------
Unless specified otherwise (see below), the above license and copyright applies
to all files in this repository.
Individual files may include additional copyright holders.
The various ports of MicroPython may include third-party software that is
licensed under different terms. These licenses are summarised in the tree
below, please refer to these files and directories for further license and
copyright information. Note that (L)GPL-licensed code listed below is only
used during the build process and is not part of the compiled source code.
/ (MIT)
/drivers
/cc3000 (BSD-3-clause)
/cc3100 (BSD-3-clause)
/wiznet5k (BSD-3-clause)
/lib
/asf4 (Apache-2.0)
/axtls (BSD-3-clause)
/config
/scripts
/config (GPL-2.0-or-later)
/Rules.mak (GPL-2.0)
/berkeley-db-1xx (BSD-4-clause)
/btstack (See btstack/LICENSE)
/cmsis (BSD-3-clause)
/crypto-algorithms (NONE)
/libhydrogen (ISC)
/littlefs (BSD-3-clause)
/lwip (BSD-3-clause)
/mynewt-nimble (Apache-2.0)
/nrfx (BSD-3-clause)
/nxp_driver (BSD-3-Clause)
/oofatfs (BSD-1-clause)
/pico-sdk (BSD-3-clause)
/re15 (BSD-3-clause)
/stm32lib (BSD-3-clause)
/tinytest (BSD-3-clause)
/tinyusb (MIT)
/uzlib (Zlib)
/logo (uses OFL-1.1)
/ports
/cc3200
/hal (BSD-3-clause)
/simplelink (BSD-3-clause)
/FreeRTOS (GPL-2.0 with FreeRTOS exception)
/stm32
/usbd*.c (MCD-ST Liberty SW License Agreement V2)
/stm32_it.* (MIT + BSD-3-clause)
/system_stm32*.c (MIT + BSD-3-clause)
/boards
/startup_stm32*.s (BSD-3-clause)
/*/stm32*.h (BSD-3-clause)
/usbdev (MCD-ST Liberty SW License Agreement V2)
/usbhost (MCD-ST Liberty SW License Agreement V2)
/teensy
/core (PJRC.COM)
/zephyr
/src (Apache-2.0)
/tools
/dfu.py (LGPL-3.0-only)


@ -61,6 +61,7 @@ TRANSLATE_SOURCES_EXC = -path "ports/*/build-*" \
help: help:
@echo "Please use \`make <target>' where <target> is one of" @echo "Please use \`make <target>' where <target> is one of"
@echo " fetch-submodules to fetch dependencies from submodules, run this right after you clone the repo"
@echo " html to make standalone HTML files" @echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories" @echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file" @echo " singlehtml to make a single large HTML file"
@ -89,7 +90,7 @@ clean:
rm -rf autoapi rm -rf autoapi
rm -rf $(STUBDIR) $(DISTDIR) *.egg-info rm -rf $(STUBDIR) $(DISTDIR) *.egg-info
html: stubs html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo @echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html." @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
@ -265,7 +266,7 @@ stubs:
@cp setup.py-stubs circuitpython-stubs/setup.py @cp setup.py-stubs circuitpython-stubs/setup.py
@cp README.rst-stubs circuitpython-stubs/README.rst @cp README.rst-stubs circuitpython-stubs/README.rst
@cp MANIFEST.in-stubs circuitpython-stubs/MANIFEST.in @cp MANIFEST.in-stubs circuitpython-stubs/MANIFEST.in
@(cd circuitpython-stubs && $(PYTHON) setup.py -q sdist) @$(PYTHON) -m build circuitpython-stubs
.PHONY: check-stubs .PHONY: check-stubs
check-stubs: stubs check-stubs: stubs
@ -322,10 +323,29 @@ clean-nrf:
clean-stm: clean-stm:
$(MAKE) -C ports/stm BOARD=feather_stm32f405_express clean $(MAKE) -C ports/stm BOARD=feather_stm32f405_express clean
# If available, do blobless partial clones of submodules to save time and space.
# A blobless partial clone lazily fetches data as needed, but has all the metadata available (tags, etc.)
# so it does not have the idiosyncrasies of a shallow clone.
#
# If not available, do a fetch that will fail, and then fix it up with a second fetch.
# (Only works for git servers that allow sha fetches.)
.PHONY: fetch-submodules .PHONY: fetch-submodules
fetch-submodules: fetch-submodules:
# This update will fail because the commits we need aren't the latest on the git submodule sync
# branch. We can ignore that though because we fix it with the second command. #####################################################################################
# (Only works for git servers that allow sha fetches.) # NOTE: Ideally, use git version 2.36.0 or later, to do partial clones of submodules.
git submodule update --init -N --depth 1 || true # If an older git is used, submodules will be cloned with a shallow clone of depth 1.
git submodule foreach 'git fetch --tags --depth 1 origin $$sha1 && git checkout -q $$sha1' # You will see a git usage message first if the git version is too old to do
# clones of submodules.
#####################################################################################
git submodule update --init --filter=blob:none || git submodule update --init -N --depth 1 || git submodule foreach 'git fetch --tags --depth 1 origin $$sha1 && git checkout -q $$sha1' || echo 'make fetch-submodules FAILED'
.PHONY: remove-submodules
remove-submodules:
git submodule deinit -f --all
rm -rf .git/modules/*
.PHONY: fetch-tags
fetch-tags:
git fetch --tags --recurse-submodules=no --shallow-since="2023-02-01" https://github.com/adafruit/circuitpython HEAD


@ -84,15 +84,19 @@ common set of requirements.
If you'd like to use the term "CircuitPython" and Blinka for your product here is what we ask: If you'd like to use the term "CircuitPython" and Blinka for your product here is what we ask:
* Your product is supported by the primary - Your product is supported by the primary
`"adafruit/circuitpython" <https://github.com/adafruit/circuitpython>`_ repo. This way we can `"adafruit/circuitpython" <https://github.com/adafruit/circuitpython>`_ repo. This way we can
update any custom code as we update the CircuitPython internals. update any custom code as we update the CircuitPython internals.
* Your product is listed on `circuitpython.org <https://circuitpython.org>`__ (source - Your product is listed on `circuitpython.org <https://circuitpython.org>`__ (source
`here <https://github.com/adafruit/circuitpython-org/>`_). This is to ensure that a user of your `here <https://github.com/adafruit/circuitpython-org/>`_). This is to ensure that a user of your
product can always download the latest version of CircuitPython from the standard place. product can always download the latest version of CircuitPython from the standard place.
* Your product has a user accessible USB plug which appears as a CIRCUITPY drive when plugged in - Your product supports at least one standard "`Workflow <https://docs.circuitpython.org/en/latest/docs/workflows.html>`__" for serial and file access:
AND/OR provides file and serial access over Bluetooth Low Energy. Boards that do not support USB
should be clearly marked as BLE-only CircuitPython. - With a user accessible USB plug which appears as a CIRCUITPY drive when plugged in.
- With file and serial access over Bluetooth Low Energy using the BLE Workflow.
- With file access over WiFi using the WiFi Workflow with serial access over USB and/or WebSocket.
- Boards that do not support the USB Workflow should be clearly marked.
If you choose not to meet these requirements, then we ask you call your version of CircuitPython If you choose not to meet these requirements, then we ask you call your version of CircuitPython
something else (for example, SuperDuperPython) and not use the Blinka logo. You can say it is something else (for example, SuperDuperPython) and not use the Blinka logo. You can say it is
@ -120,7 +124,7 @@ Behavior
make each file independent from each other. make each file independent from each other.
- ``boot.py`` runs only once on start up before - ``boot.py`` runs only once on start up before
USB is initialized. This lays the ground work for configuring USB at workflows are initialized. This lays the ground work for configuring USB at
startup rather than it being fixed. Since serial is not available, startup rather than it being fixed. Since serial is not available,
output is written to ``boot_out.txt``. output is written to ``boot_out.txt``.
- ``code.py`` (or ``main.py``) is run after every reload until it - ``code.py`` (or ``main.py``) is run after every reload until it
@ -134,13 +138,26 @@ Behavior
- Adds a safe mode that does not run user code after a hard crash or brown out. This makes it - Adds a safe mode that does not run user code after a hard crash or brown out. This makes it
possible to fix code that causes nasty crashes by making it available through mass storage after possible to fix code that causes nasty crashes by making it available through mass storage after
the crash. A reset (the button) is needed after it's fixed to get back into normal mode. the crash. A reset (the button) is needed after it's fixed to get back into normal mode.
- Safe mode may be handled programmatically by providing a ``safemode.py``.
``safemode.py`` is run if the board has reset due to entering safe mode, unless the safe mode
was initiated by the user by pressing button(s).
USB is not available, so nothing can be printed.
``safemode.py`` can determine why safe mode occurred
using ``supervisor.runtime.safe_mode_reason`` and take appropriate action. For instance,
if a hard crash occurred, ``safemode.py`` may do a ``microcontroller.reset()``
to restart automatically despite the crash.
If the battery is low but is being charged, ``safemode.py`` may put the board into deep sleep
for a while. Or it may simply reset, and have ``code.py`` check the voltage and do the sleep.
(A sketch of such a handler is shown after this list.)
- RGB status LED indicating CircuitPython state. - RGB status LED indicating CircuitPython state.
- Re-runs ``code.py`` or other main file after file system writes over USB mass storage. (Disable with - One green flash - code completed without error.
- Two red flashes - code ended due to an exception.
- Three yellow flashes - safe mode. May be due to CircuitPython internal error.
- Re-runs ``code.py`` or other main file after file system writes by a workflow. (Disable with
``supervisor.disable_autoreload()``) ``supervisor.disable_autoreload()``)
- Autoreload is disabled while the REPL is active. - Autoreload is disabled while the REPL is active.
- Main is one of these: ``code.txt``, ``code.py``, ``main.py``, - ``code.py`` may also be named ``code.txt``, ``main.py``, or ``main.txt``.
``main.txt`` - ``boot.py`` may also be named ``boot.txt``.
- Boot is one of these: ``boot.py``, ``boot.txt`` - ``safemode.py`` may also be named ``safemode.txt``.
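A hedged sketch of such a ``safemode.py`` handler (the exact ``SafeModeReason``
member names below are assumptions; check ``supervisor.SafeModeReason`` for your
CircuitPython version):

.. code-block:: python

    # safemode.py - runs only when the board reset into safe mode (not user-initiated).
    import microcontroller
    import supervisor

    reason = supervisor.runtime.safe_mode_reason

    # Assumed member names, for illustration only.
    if reason == supervisor.SafeModeReason.HARD_FAULT:
        # Hard crash: restart and let code.py try again.
        microcontroller.reset()
    elif reason == supervisor.SafeModeReason.BROWNOUT:
        # Low voltage: reset and let code.py check the battery and sleep if needed.
        microcontroller.reset()
    # For any other reason, stay in safe mode so the problem can be inspected
    # over USB mass storage after the next manual reset.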
API API
~~~ ~~~
@ -213,7 +230,7 @@ Supported Support status
================ ============================================================ ================ ============================================================
atmel-samd ``SAMD21`` stable | ``SAMD51`` stable atmel-samd ``SAMD21`` stable | ``SAMD51`` stable
cxd56 stable cxd56 stable
espressif ``ESP32-C3`` beta | ``ESP32-S2`` stable | ``ESP32-S3`` beta espressif ``ESP32`` beta | ``ESP32-C3`` beta | ``ESP32-S2`` stable | ``ESP32-S3`` beta
litex alpha litex alpha
mimxrt10xx alpha mimxrt10xx alpha
nrf stable nrf stable

conf.py

@ -52,10 +52,14 @@ subprocess.check_output(["make", "stubs"])
#modules_support_matrix = shared_bindings_matrix.support_matrix_excluded_boards() #modules_support_matrix = shared_bindings_matrix.support_matrix_excluded_boards()
modules_support_matrix = shared_bindings_matrix.support_matrix_by_board() modules_support_matrix = shared_bindings_matrix.support_matrix_by_board()
modules_support_matrix_reverse = defaultdict(list) modules_support_matrix_reverse = defaultdict(list)
for board, modules in modules_support_matrix.items(): for board, matrix_info in modules_support_matrix.items():
for module in modules: for module in matrix_info["modules"]:
modules_support_matrix_reverse[module].append(board) modules_support_matrix_reverse[module].append(board)
modules_support_matrix_reverse = dict((module, sorted(boards)) for module, boards in modules_support_matrix_reverse.items())
modules_support_matrix_reverse = dict(
(module, sorted(boards))
for module, boards in modules_support_matrix_reverse.items()
)
html_context = { html_context = {
'support_matrix': modules_support_matrix, 'support_matrix': modules_support_matrix,
@ -73,6 +77,7 @@ needs_sphinx = '1.3'
extensions = [ extensions = [
'sphinx.ext.autodoc', 'sphinx.ext.autodoc',
'sphinx.ext.doctest', 'sphinx.ext.doctest',
"sphinxcontrib.jquery",
'sphinxcontrib.rsvgconverter', 'sphinxcontrib.rsvgconverter',
'sphinx.ext.intersphinx', 'sphinx.ext.intersphinx',
'sphinx.ext.todo', 'sphinx.ext.todo',
@ -83,7 +88,7 @@ extensions = [
] ]
# Add any paths that contain templates here, relative to this directory. # Add any paths that contain templates here, relative to this directory.
templates_path = ['templates'] templates_path = ['templates', "docs/templates"]
# The suffix of source filenames. # The suffix of source filenames.
source_suffix = { source_suffix = {
@ -167,6 +172,7 @@ exclude_patterns = ["**/build*",
".env", ".env",
".venv", ".venv",
".direnv", ".direnv",
".devcontainer/Readme.md",
"data", "data",
"docs/autoapi", "docs/autoapi",
"docs/README.md", "docs/README.md",
@ -195,6 +201,7 @@ exclude_patterns = ["**/build*",
"ports/cxd56/spresense-exported-sdk", "ports/cxd56/spresense-exported-sdk",
"ports/espressif/certificates", "ports/espressif/certificates",
"ports/espressif/esp-idf", "ports/espressif/esp-idf",
"ports/espressif/esp32-camera",
"ports/espressif/.idf_tools", "ports/espressif/.idf_tools",
"ports/espressif/peripherals", "ports/espressif/peripherals",
"ports/litex/hw", "ports/litex/hw",
@ -208,6 +215,8 @@ exclude_patterns = ["**/build*",
"ports/nrf/peripherals", "ports/nrf/peripherals",
"ports/nrf/usb", "ports/nrf/usb",
"ports/raspberrypi/sdk", "ports/raspberrypi/sdk",
"ports/raspberrypi/lib",
"ports/silabs",
"ports/stm/st_driver", "ports/stm/st_driver",
"ports/stm/packages", "ports/stm/packages",
"ports/stm/peripherals", "ports/stm/peripherals",

@ -1 +1 @@
Subproject commit 266ea20ed80104c315dcb124b482fa5f9f48cdec Subproject commit 427cc923976229bcb981ca6f218ebe8efd636df6


@ -49,6 +49,10 @@
#include "shared-bindings/_bleio/ScanEntry.h" #include "shared-bindings/_bleio/ScanEntry.h"
#include "shared-bindings/time/__init__.h" #include "shared-bindings/time/__init__.h"
#if CIRCUITPY_OS_GETENV
#include "shared-bindings/os/__init__.h"
#endif
#define MSEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000) / (RESOLUTION)) #define MSEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000) / (RESOLUTION))
#define SEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000000) / (RESOLUTION)) #define SEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000000) / (RESOLUTION))
#define UNITS_TO_SEC(TIME, RESOLUTION) (((TIME)*(RESOLUTION)) / 1000000) #define UNITS_TO_SEC(TIME, RESOLUTION) (((TIME)*(RESOLUTION)) / 1000000)
@ -278,17 +282,27 @@ char default_ble_name[] = { 'C', 'I', 'R', 'C', 'U', 'I', 'T', 'P', 'Y', 0, 0, 0
// Get various values and limits set by the adapter. // Get various values and limits set by the adapter.
// Set event mask. // Set event mask.
STATIC void bleio_adapter_hci_init(bleio_adapter_obj_t *self) { STATIC void bleio_adapter_hci_init(bleio_adapter_obj_t *self) {
mp_int_t name_len = 0;
const size_t len = sizeof(default_ble_name); #if CIRCUITPY_OS_GETENV
mp_obj_t name = common_hal_os_getenv("CIRCUITPY_BLE_NAME", mp_const_none);
if (name != mp_const_none) {
mp_arg_validate_type_string(name, MP_QSTR_CIRCUITPY_BLE_NAME);
self->name = name;
}
#endif
if (!self->name) {
name_len = sizeof(default_ble_name);
bt_addr_t addr; bt_addr_t addr;
hci_check_error(hci_read_bd_addr(&addr)); hci_check_error(hci_read_bd_addr(&addr));
default_ble_name[len - 4] = nibble_to_hex_lower[addr.val[1] >> 4 & 0xf]; default_ble_name[name_len - 4] = nibble_to_hex_lower[addr.val[1] >> 4 & 0xf];
default_ble_name[len - 3] = nibble_to_hex_lower[addr.val[1] & 0xf]; default_ble_name[name_len - 3] = nibble_to_hex_lower[addr.val[1] & 0xf];
default_ble_name[len - 2] = nibble_to_hex_lower[addr.val[0] >> 4 & 0xf]; default_ble_name[name_len - 2] = nibble_to_hex_lower[addr.val[0] >> 4 & 0xf];
default_ble_name[len - 1] = nibble_to_hex_lower[addr.val[0] & 0xf]; default_ble_name[name_len - 1] = nibble_to_hex_lower[addr.val[0] & 0xf];
self->name = mp_obj_new_str(default_ble_name, len); self->name = mp_obj_new_str(default_ble_name, (uint8_t)name_len);
}
// Get version information. // Get version information.
if (hci_read_local_version(&self->hci_version, &self->hci_revision, &self->lmp_version, if (hci_read_local_version(&self->hci_version, &self->hci_revision, &self->lmp_version,
@ -469,7 +483,7 @@ mp_obj_t common_hal_bleio_adapter_start_scan(bleio_adapter_obj_t *self, uint8_t
if (self->scan_results != NULL) { if (self->scan_results != NULL) {
if (!shared_module_bleio_scanresults_get_done(self->scan_results)) { if (!shared_module_bleio_scanresults_get_done(self->scan_results)) {
mp_raise_bleio_BluetoothError(translate("Scan already in progess. Stop with stop_scan.")); mp_raise_bleio_BluetoothError(translate("Scan already in progress. Stop with stop_scan."));
} }
self->scan_results = NULL; self->scan_results = NULL;
} }


@ -65,7 +65,7 @@ typedef struct _bleio_adapter_obj_t {
uint16_t manufacturer; uint16_t manufacturer;
uint16_t lmp_subversion; uint16_t lmp_subversion;
// Used to monitor advertising timeout for legacy avertising. // Used to monitor advertising timeout for legacy advertising.
uint64_t advertising_start_ticks; uint64_t advertising_start_ticks;
uint64_t advertising_timeout_msecs; // If zero, do not check. uint64_t advertising_timeout_msecs; // If zero, do not check.


@ -57,9 +57,9 @@ void common_hal_bleio_characteristic_construct(bleio_characteristic_obj_t *self,
self->value = mp_obj_new_bytes(initial_value_bufinfo->buf, initial_value_bufinfo->len); self->value = mp_obj_new_bytes(initial_value_bufinfo->buf, initial_value_bufinfo->len);
const mp_int_t max_length_max = 512; const mp_int_t max_length_max = 512;
if (max_length < 0 || max_length > max_length_max) {
mp_raise_ValueError(translate("max_length must be <= 512")); mp_arg_validate_int_range(max_length, 0, max_length_max, MP_QSTR_max_length);
}
self->max_length = max_length; self->max_length = max_length;
self->fixed_length = fixed_length; self->fixed_length = fixed_length;


@ -93,6 +93,7 @@ bool common_hal_bleio_characteristic_buffer_deinited(bleio_characteristic_buffer
void common_hal_bleio_characteristic_buffer_deinit(bleio_characteristic_buffer_obj_t *self) { void common_hal_bleio_characteristic_buffer_deinit(bleio_characteristic_buffer_obj_t *self) {
if (!common_hal_bleio_characteristic_buffer_deinited(self)) { if (!common_hal_bleio_characteristic_buffer_deinited(self)) {
bleio_characteristic_clear_observer(self->characteristic); bleio_characteristic_clear_observer(self->characteristic);
ringbuf_deinit(&self->ringbuf);
} }
} }


@ -515,7 +515,7 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
// (gattc_char->char_props.write ? CHAR_PROP_WRITE : 0) | // (gattc_char->char_props.write ? CHAR_PROP_WRITE : 0) |
// (gattc_char->char_props.write_wo_resp ? CHAR_PROP_WRITE_NO_RESPONSE : 0); // (gattc_char->char_props.write_wo_resp ? CHAR_PROP_WRITE_NO_RESPONSE : 0);
// // Call common_hal_bleio_characteristic_construct() to initalize some fields and set up evt handler. // // Call common_hal_bleio_characteristic_construct() to initialize some fields and set up evt handler.
// common_hal_bleio_characteristic_construct( // common_hal_bleio_characteristic_construct(
// characteristic, m_char_discovery_service, gattc_char->handle_value, uuid, // characteristic, m_char_discovery_service, gattc_char->handle_value, uuid,
// props, SECURITY_MODE_OPEN, SECURITY_MODE_OPEN, // props, SECURITY_MODE_OPEN, SECURITY_MODE_OPEN,


@ -37,13 +37,13 @@
#include "supervisor/shared/tick.h" #include "supervisor/shared/tick.h"
STATIC void write_to_ringbuf(bleio_packet_buffer_obj_t *self, uint8_t *data, uint16_t len) { STATIC void write_to_ringbuf(bleio_packet_buffer_obj_t *self, uint8_t *data, uint16_t len) {
if (len + sizeof(uint16_t) > ringbuf_capacity(&self->ringbuf)) { if (len + sizeof(uint16_t) > ringbuf_size(&self->ringbuf)) {
// This shouldn't happen. // This shouldn't happen.
return; return;
} }
// Push all the data onto the ring buffer. // Push all the data onto the ring buffer.
// Make room for the new value by dropping the oldest packets first. // Make room for the new value by dropping the oldest packets first.
while (ringbuf_capacity(&self->ringbuf) - ringbuf_num_filled(&self->ringbuf) < len + sizeof(uint16_t)) { while (ringbuf_size(&self->ringbuf) - ringbuf_num_filled(&self->ringbuf) < len + sizeof(uint16_t)) {
uint16_t packet_length; uint16_t packet_length;
ringbuf_get_n(&self->ringbuf, (uint8_t *)&packet_length, sizeof(uint16_t)); ringbuf_get_n(&self->ringbuf, (uint8_t *)&packet_length, sizeof(uint16_t));
for (uint16_t i = 0; i < packet_length; i++) { for (uint16_t i = 0; i < packet_length; i++) {
@ -264,5 +264,6 @@ bool common_hal_bleio_packet_buffer_deinited(bleio_packet_buffer_obj_t *self) {
void common_hal_bleio_packet_buffer_deinit(bleio_packet_buffer_obj_t *self) { void common_hal_bleio_packet_buffer_deinit(bleio_packet_buffer_obj_t *self) {
if (!common_hal_bleio_packet_buffer_deinited(self)) { if (!common_hal_bleio_packet_buffer_deinited(self)) {
bleio_characteristic_clear_observer(self->characteristic); bleio_characteristic_clear_observer(self->characteristic);
ringbuf_deinit(&self->ringbuf);
} }
} }


@ -57,6 +57,11 @@ bool vm_used_ble;
// } // }
// } // }
void bleio_user_reset() {
// HCI doesn't support the BLE workflow so just do a full reset.
bleio_reset();
}
// Turn off BLE on a reset or reload. // Turn off BLE on a reset or reload.
void bleio_reset() { void bleio_reset() {
// Create a UUID object for all CCCD's. // Create a UUID object for all CCCD's.


@ -44,6 +44,7 @@
#include "shared-bindings/_bleio/Service.h" #include "shared-bindings/_bleio/Service.h"
#include "shared-bindings/_bleio/UUID.h" #include "shared-bindings/_bleio/UUID.h"
#include "supervisor/shared/tick.h" #include "supervisor/shared/tick.h"
#include "supervisor/shared/translate/translate.h"
STATIC uint16_t max_mtu = BT_ATT_DEFAULT_LE_MTU; // 23 STATIC uint16_t max_mtu = BT_ATT_DEFAULT_LE_MTU; // 23
STATIC unsigned long timeout = 5000; STATIC unsigned long timeout = 5000;
@ -95,7 +96,7 @@ STATIC uint8_t bleio_properties_to_ble_spec_properties(uint8_t bleio_properties)
return ble_spec_properties; return ble_spec_properties;
} }
// FIX not currently used; reenable when used. // FIX not currently used; re-enable when used.
#if 0 #if 0
STATIC uint8_t ble_spec_properties_to_bleio_properties(uint8_t ble_spec_properties) { STATIC uint8_t ble_spec_properties_to_bleio_properties(uint8_t ble_spec_properties) {
uint8_t bleio_properties = 0; uint8_t bleio_properties = 0;
@ -963,7 +964,7 @@ static void process_read_group_req(uint16_t conn_handle, uint16_t mtu, uint8_t d
// Keep track of the first one to make sure. // Keep track of the first one to make sure.
size_t sizeof_first_service_uuid = 0; size_t sizeof_first_service_uuid = 0;
// Size of a single bt_att_group_data chunk. Start with the intial size, and // Size of a single bt_att_group_data chunk. Start with the initial size, and
// add the uuid size in the loop below. // add the uuid size in the loop below.
size_t data_length = sizeof(struct bt_att_group_data); size_t data_length = sizeof(struct bt_att_group_data);


@ -150,7 +150,7 @@ struct bt_att_read_mult_req {
uint16_t handles[]; uint16_t handles[];
} __packed; } __packed;
/* Read Multiple Respose */ /* Read Multiple Response */
#define BT_ATT_OP_READ_MULT_RSP 0x0f #define BT_ATT_OP_READ_MULT_RSP 0x0f
struct bt_att_read_mult_rsp { struct bt_att_read_mult_rsp {
uint8_t _dummy[0]; uint8_t _dummy[0];
@ -243,7 +243,7 @@ struct bt_att_read_mult_vl_req {
uint16_t handles[]; uint16_t handles[];
} __packed; } __packed;
/* Read Multiple Respose */ /* Read Multiple Response */
#define BT_ATT_OP_READ_MULT_VL_RSP 0x21 #define BT_ATT_OP_READ_MULT_VL_RSP 0x21
struct bt_att_read_mult_vl_rsp { struct bt_att_read_mult_vl_rsp {
uint16_t len; uint16_t len;


@ -1,9 +1,8 @@
MicroPython & CircuitPython license information # MicroPython & CircuitPython License
===============================================
The MIT License (MIT) MIT License
Copyright (c) 2013-2017 Damien P. George, and others Copyright (c) 2013-2022 Damien P. George and others
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal
@ -12,13 +11,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in The above copyright notice and this permission notice shall be included in all
all copies or substantial portions of the Software. copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
THE SOFTWARE. SOFTWARE.


@ -141,7 +141,7 @@ statement will ensure hardware isn't enabled longer than needed.
Verify your device Verify your device
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------
Whenever possible, make sure device you are talking to is the device you expect. Whenever possible, make sure the device you are talking to is the device you expect.
If not, raise a RuntimeError. Beware that I2C addresses can be identical on If not, raise a RuntimeError. Beware that I2C addresses can be identical on
different devices so read registers you know to make sure they match your different devices so read registers you know to make sure they match your
expectation. Validating this upfront will help catch mistakes. expectation. Validating this upfront will help catch mistakes.
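For example, a driver constructor might check a WHO_AM_I-style register and raise
``RuntimeError`` on a mismatch. A minimal sketch (the register address, expected ID,
and I2C address below are hypothetical):

.. code-block:: python

    from adafruit_bus_device import i2c_device

    _WHO_AM_I = 0x0F      # hypothetical identity register
    _EXPECTED_ID = 0x6A   # hypothetical chip ID

    class ExampleSensor:
        """Driver that verifies the device identity before use."""

        def __init__(self, i2c, address=0x42):
            self.i2c_device = i2c_device.I2CDevice(i2c, address)
            buf = bytearray(1)
            with self.i2c_device as dev:
                dev.write_then_readinto(bytes([_WHO_AM_I]), buf)
            if buf[0] != _EXPECTED_ID:
                raise RuntimeError("Expected device not found; check wiring and I2C address")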
@ -202,10 +202,10 @@ interchangeably with the CPython name. This is confusing. Instead, think up a
new name that is related to the extra functionality you are adding. new name that is related to the extra functionality you are adding.
For example, storage mounting and unmounting related functions were moved from For example, storage mounting and unmounting related functions were moved from
``uos`` into a new `storage` module. Terminal related functions were moved into ``uos`` into a new `storage` module. These names better match their
`multiterminal`. These names better match their functionality and do not functionality and do not conflict with CPython names. Make sure to check that
conflict with CPython names. Make sure to check that you don't conflict with you don't conflict with CPython libraries too. That way we can port the API to
CPython libraries too. That way we can port the API to CPython in the future. CPython in the future.
Example Example
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -213,7 +213,7 @@ Example
When adding extra functionality to CircuitPython to mimic what a normal When adding extra functionality to CircuitPython to mimic what a normal
operating system would do, either copy an existing CPython API (for example file operating system would do, either copy an existing CPython API (for example file
writing) or create a separate module to achieve what you want. For example, writing) or create a separate module to achieve what you want. For example,
mounting and unmount drives is not a part of CPython so it should be done in a mounting and unmounting drives is not a part of CPython so it should be done in a
module, such as a new ``storage`` module, that is only available in CircuitPython. module, such as a new ``storage`` module, that is only available in CircuitPython.
That way when someone moves the code to CPython they know what parts need to be That way when someone moves the code to CPython they know what parts need to be
adapted. adapted.
@ -267,6 +267,14 @@ After the license comment::
""" """
Version description
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
After the import statements::
__version__ = "0.0.0+auto.0"
__repo__ = "<repo github link>"
Class description Class description
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -309,7 +317,7 @@ following structure:
param_type param_type
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The type of the parameter. This could be among other `int`, `float`, `str` `bool`, etc. The type of the parameter. This could be, among others, ``int``, ``float``, ``str``, ``bool``, etc.
To document an object in the CircuitPython domain, you need to include a ``~`` before the To document an object in the CircuitPython domain, you need to include a ``~`` before the
definition as shown in the following example: definition as shown in the following example:
@ -494,6 +502,45 @@ backticks ``:class:`~adafruit_motor.servo.Servo```. You must also add the refer
"adafruit_motor": ("https://circuitpython.readthedocs.io/projects/motor/en/latest/", None,), "adafruit_motor": ("https://circuitpython.readthedocs.io/projects/motor/en/latest/", None,),
Use ``adafruit_register`` when possible
--------------------------------------------------------------------------------
`Register <https://github.com/adafruit/Adafruit_CircuitPython_Register>`_ is
a foundational library that manages packing and unpacking data from I2C device
registers. There is also `Register SPI <https://github.com/adafruit/Adafruit_CircuitPython_Register_SPI>`_
for SPI devices. When possible, use one of these libraries for unpacking and
packing registers. This ensures the packing code is shared amongst all
registers (even across drivers). Furthermore, it simplifies device definitions
by making them declarative (data only).
Values with non-consecutive bits in a register or that represent FIFO endpoints
may not map well to existing register classes. In unique cases like these, it is
ok to read and write the register directly.
*Do not* add all registers from a datasheet upfront. Instead, only add the ones
necessary for the functionality the driver exposes. Adding them all will lead to
unnecessary file size and API clutter. See `this video about outside-in design
from @tannewt <https://www.youtube.com/watch?v=3QewiyfBQh8>`_.
I2C Example
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. code-block:: python
from adafruit_register import i2c_bit
from adafruit_bus_device import i2c_device
class HelloWorldDevice:
"""Device with two bits to control when the words 'hello' and 'world' are lit."""
hello = i2c_bit.RWBit(0x0, 0x0)
"""Bit to indicate if hello is lit."""
world = i2c_bit.RWBit(0x1, 0x0)
"""Bit to indicate if world is lit."""
def __init__(self, i2c, device_address=0x0):
self.i2c_device = i2c_device.I2CDevice(i2c, device_address)
Use BusDevice Use BusDevice
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------
@ -668,8 +715,24 @@ when using ``const()``, keep in mind these general guide lines:
- Always use via an import, ex: ``from micropython import const`` - Always use via an import, ex: ``from micropython import const``
- Limit use to global (module level) variables only. - Limit use to global (module level) variables only.
- If user will not need access to variable, prefix name with a leading - Only used when the user will not need access to variable and prefix name with
underscore, ex: ``_SOME_CONST``. a leading underscore, ex: ``_SOME_CONST``.
Example
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. code-block:: python
from adafruit_bus_device import i2c_device
from micropython import const
_DEFAULT_I2C_ADDR = const(0x42)
class Widget:
"""A generic widget."""
def __init__(self, i2c, address=_DEFAULT_I2C_ADDR):
self.i2c_device = i2c_device.I2CDevice(i2c, address)
Libraries Examples Libraries Examples
------------------ ------------------
@ -679,14 +742,7 @@ You could other examples if needed featuring different
functionalities of the library. functionalities of the library.
If you add additional examples, be sure to include them in the ``examples.rst``. Naming of the examples If you add additional examples, be sure to include them in the ``examples.rst``. Naming of the examples
files should use the name of the library followed by a description, using underscore to separate them. files should use the name of the library followed by a description, using underscore to separate them.
When using print statements you should use the ``" ".format()`` format, as there are particular boards
that are not capable to use f-strings.
.. code-block:: python
text_to_display = "World!"
print("Hello {}".format(text_to_display))
Sensor properties and units Sensor properties and units
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------
@ -751,6 +807,16 @@ properties.
| ``sound_level`` | float | non-unit-specific sound level (monotonic but not actual decibels) | | ``sound_level`` | float | non-unit-specific sound level (monotonic but not actual decibels) |
+-----------------------+-----------------------+-------------------------------------------------------------------------+ +-----------------------+-----------------------+-------------------------------------------------------------------------+
Driver constant naming
--------------------------------------------------------------------------------
When adding variables for constant values for a driver, do not include the
device's name in the variable name. For example, in ``adafruit_fancy123.py``,
variables should not start with ``FANCY123_``. Adding this prefix increases RAM
usage and .mpy file size because variable names are preserved. User code should
refer to these constants as ``adafruit_fancy123.HELLO_WORLD`` for clarity.
``adafruit_fancy123.FANCY123_HELLO_WORLD`` would be overly verbose.
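A short illustration (the module and constant names are hypothetical):

.. code-block:: python

    # adafruit_fancy123.py - hypothetical driver module
    from micropython import const

    # Good: no FANCY123_ prefix; user code refers to this as adafruit_fancy123.HELLO_WORLD.
    HELLO_WORLD = const(0x01)

    # Internal-only constants get a leading underscore instead.
    _CHIP_ID = const(0x5A)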
Adding native modules Adding native modules
-------------------------------------------------------------------------------- --------------------------------------------------------------------------------


@ -1,33 +0,0 @@
Additional CircuitPython Libraries and Drivers on GitHub
=========================================================
These are libraries and drivers available in separate GitHub repos. They are
designed for use with CircuitPython and may or may not work with
`MicroPython <https://micropython.org>`_.
Adafruit CircuitPython Library Bundle
--------------------------------------
We provide a bundle of all our libraries to ease installation of drivers and
their dependencies. The bundle is primarily geared to the Adafruit Express line
of boards which feature a relatively large external flash. With Express boards,
it's easy to copy them all onto the filesystem. However, if you don't have
enough space simply copy things over as they are needed.
- The Adafruit bundles are available on GitHub: <https://github.com/adafruit/Adafruit_CircuitPython_Bundle/releases>.
- Documentation for the bundle, which includes links to documentation for all
libraries, is available here: <https://circuitpython.readthedocs.io/projects/bundle/en/latest/>.
CircuitPython Community Library Bundle
---------------------------------------
This bundle contains non-Adafruit sponsored libraries, that are written and submitted
by members of the community.
- The Community bundles are available on GitHub: <https://github.com/adafruit/CircuitPython_Community_Bundle/releases>.
- Documentation is not available on ReadTheDocs at this time. See each library for any
included documentation.

docs/environment.rst (new file)

@ -0,0 +1,93 @@
Environment Variables
=====================
CircuitPython 8.0.0 introduces support for environment variables. Environment
variables are commonly used to store "secrets" such as Wi-Fi passwords and API
keys. This method *does not* make them secure. It only separates them from the
code.
CircuitPython uses a file called ``settings.toml`` at the drive root (no
folder) as the environment. User code can access the values from the file
using `os.getenv()`. It is recommended to save any values used repeatedly in a
variable because `os.getenv()` will parse the ``settings.toml`` file contents
on every access.
CircuitPython only supports a subset of the full toml specification, see below
for more details. The subset is very "Python-like", which is a key reason we
selected the format.
Due to technical limitations it probably also accepts some files that are
not valid TOML files; bugs of this nature are subject to change (i.e., be
fixed) without the usual deprecation period for incompatible changes.
File format example:
.. code-block::
str_key="Hello world" # with trailing comment
int_key = 7
unicode_key="œuvre"
unicode_key2="\\u0153uvre" # same as above
unicode_key3="\\U00000153uvre" # same as above
escape_codes="supported, including \\r\\n\\"\\\\"
# comment
[subtable]
subvalue="cannot retrieve this using getenv"
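A minimal sketch of reading these values from user code, assuming the
``settings.toml`` above; caching the results in variables avoids re-parsing
the file on every access:

.. code-block:: python

    import os

    # Read once and keep in local variables; os.getenv() parses settings.toml each call.
    greeting = os.getenv("str_key")     # "Hello world"
    count = os.getenv("int_key")        # 7, returned as an integer
    missing = os.getenv("no_such_key")  # None when the key is not present

    print(greeting, count, missing)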
Details of the toml language subset
-----------------------------------
* The content is required to be in UTF-8 encoding
* The supported data types are string and integer
* Only basic strings are supported, not triple-quoted strings
* Only integers supported by strtol (no 0o, no 0b, no underscores as in 1_000; a leading zero means octal, so 011 is 9, not 11)
* Only bare keys are supported
* Duplicate keys are not diagnosed.
* Comments are supported
* Only values from the "root table" can be retrieved
* Due to technical limitations, the content of multi-line
strings can erroneously be parsed as a value.
CircuitPython behavior
----------------------
CircuitPython will also read the environment to configure its behavior. Other
keys are ignored by CircuitPython. Here are the keys it uses:
CIRCUITPY_BLE_NAME
~~~~~~~~~~~~~~~~~~
Default BLE name the board advertises as, including for the BLE workflow.
CIRCUITPY_PYSTACK_SIZE
~~~~~~~~~~~~~~~~~~~~~~
Sets the size of the Python stack. Must be a multiple of 4. The default value is currently 1536.
Increasing the stack reduces the size of the heap available to Python code.
Use it to avoid "Pystack exhausted" errors when the code can't be reworked to avoid them.
CIRCUITPY_RESERVED_PSRAM
~~~~~~~~~~~~~~~~~~~~~~~~
On boards with Espressif microcontrollers with PSRAM (also called SPIRAM), permanently reserve a portion of PSRAM for use by esp-idf.
This storage is removed from the CircuitPython "heap" and is available for allocation by esp-idf routines in the core instead.
Generally, only set this to a non-zero value when it is required by a specific core module.
CIRCUITPY_WEB_API_PASSWORD
~~~~~~~~~~~~~~~~~~~~~~~~~~
Password required to make modifications to the board from the Web Workflow.
CIRCUITPY_WEB_API_PORT
~~~~~~~~~~~~~~~~~~~~~~
TCP port number used for the web HTTP API. Defaults to 80 when omitted.
CIRCUITPY_WEB_INSTANCE_NAME
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Name the board advertises as for the web workflow. Defaults to the human-readable board name if omitted.
CIRCUITPY_WIFI_PASSWORD
~~~~~~~~~~~~~~~~~~~~~~~
Wi-Fi password used to auto-connect to CIRCUITPY_WIFI_SSID.
CIRCUITPY_WIFI_SSID
~~~~~~~~~~~~~~~~~~~
Wi-Fi SSID to auto-connect to even if user code is not running.

View File

@ -21,7 +21,9 @@ Full Table of Contents
../shared-bindings/index.rst ../shared-bindings/index.rst
supported_ports.rst supported_ports.rst
troubleshooting.rst troubleshooting.rst
drivers.rst libraries.rst
workflows
environment.rst
.. toctree:: .. toctree::
:maxdepth: 1 :maxdepth: 1
@ -46,7 +48,7 @@ Full Table of Contents
../CONTRIBUTING ../CONTRIBUTING
../BUILDING ../BUILDING
../CODE_OF_CONDUCT ../CODE_OF_CONDUCT
../license.rst ../docs/LICENSE
../WEBUSB_README ../WEBUSB_README
Indices and tables Indices and tables

31
docs/libraries.rst Normal file
View File

@ -0,0 +1,31 @@
Adafruit CircuitPython Libraries
================================
Documentation for all Adafruit-sponsored CircuitPython libraries is at:
<https://docs.circuitpython.org/projects/bundle/en/latest/drivers.html>.
CircuitPython Library Bundles
=============================
Many Python libraries, including device drivers, have been written for use with CircuitPython.
They are maintained in separate GitHub repos, one per library.
Libraries are packaged in *bundles*, which are ZIP files that are snapshots in time of a group of libraries.
Adafruit sponsors and maintains several hundred libraries, packaged in the **Adafruit Library Bundle**.
Adafruit-sponsored libraries are also available on <https://pypi.org>.
Yet other libraries are maintained by members of the CircuitPython community,
and are packaged in the **CircuitPython Community Library Bundle**.
The Adafruit bundles are available on GitHub: <https://github.com/adafruit/Adafruit_CircuitPython_Bundle/releases>.
The Community bundles are available at: <https://github.com/adafruit/CircuitPython_Community_Bundle/releases>.
More detailed information about the bundles, and download links for the latest bundles,
are at <https://circuitpython.org/libraries>.
Documentation about bundle construction is at: <https://circuitpython.readthedocs.io/projects/bundle/en/latest/>.
Documentation for Community Libraries is not available on ReadTheDocs at this time. See the GitHub repository
for each library for any included documentation.

View File

@ -1,323 +0,0 @@
:mod:`uasyncio` --- asynchronous I/O scheduler
==============================================
.. module:: uasyncio
:synopsis: asynchronous I/O scheduler for writing concurrent code
|see_cpython_module|
`asyncio <https://docs.python.org/3.8/library/asyncio.html>`_
Example::
import uasyncio
async def blink(led, period_ms):
while True:
led.on()
await uasyncio.sleep_ms(5)
led.off()
await uasyncio.sleep_ms(period_ms)
async def main(led1, led2):
uasyncio.create_task(blink(led1, 700))
uasyncio.create_task(blink(led2, 400))
await uasyncio.sleep_ms(10_000)
# Running on a pyboard
from pyb import LED
uasyncio.run(main(LED(1), LED(2)))
# Running on a generic board
from machine import Pin
uasyncio.run(main(Pin(1), Pin(2)))
Core functions
--------------
.. function:: create_task(coro)
Create a new task from the given coroutine and schedule it to run.
Returns the corresponding `Task` object.
.. function:: current_task()
Return the `Task` object associated with the currently running task.
.. function:: run(coro)
Create a new task from the given coroutine and run it until it completes.
Returns the value returned by *coro*.
.. function:: sleep(t)
Sleep for *t* seconds (can be a float).
This is a coroutine.
.. function:: sleep_ms(t)
Sleep for *t* milliseconds.
This is a coroutine, and a MicroPython extension.
Additional functions
--------------------
.. function:: wait_for(awaitable, timeout)
Wait for the *awaitable* to complete, but cancel it if it takes longer
than *timeout* seconds. If *awaitable* is not a task then a task will be
created from it.
If a timeout occurs, it cancels the task and raises ``asyncio.TimeoutError``:
this should be trapped by the caller.
Returns the return value of *awaitable*.
This is a coroutine.
.. function:: wait_for_ms(awaitable, timeout)
Similar to `wait_for` but *timeout* is an integer in milliseconds.
This is a coroutine, and a MicroPython extension.
.. function:: gather(*awaitables, return_exceptions=False)
Run all *awaitables* concurrently. Any *awaitables* that are not tasks are
promoted to tasks.
Returns a list of return values of all *awaitables*.
This is a coroutine.
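For example, a small sketch combining `gather` and `wait_for` (the coroutine
names are illustrative only)::

    import uasyncio

    async def fetch(n):
        await uasyncio.sleep_ms(100 * n)
        return n

    async def main():
        # Run three coroutines concurrently and collect their results in order.
        results = await uasyncio.gather(fetch(1), fetch(2), fetch(3))
        # Give a single coroutine up to 2 seconds to finish.
        value = await uasyncio.wait_for(fetch(1), 2)
        print(results, value)

    uasyncio.run(main())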
class Task
----------
.. class:: Task()
This object wraps a coroutine into a running task. Tasks can be waited on
using ``await task``, which will wait for the task to complete and return
the return value of the task.
Tasks should not be created directly; rather, use `create_task` to create them.
.. method:: Task.cancel()
Cancel the task by injecting a ``CancelledError`` into it. The task may
or may not ignore this exception.
class Event
-----------
.. class:: Event()
Create a new event which can be used to synchronise tasks. Events start
in the cleared state.
.. method:: Event.is_set()
Returns ``True`` if the event is set, ``False`` otherwise.
.. method:: Event.set()
Set the event. Any tasks waiting on the event will be scheduled to run.
.. method:: Event.clear()
Clear the event.
.. method:: Event.wait()
Wait for the event to be set. If the event is already set then it returns
immediately.
This is a coroutine.
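A small usage sketch (names are illustrative)::

    import uasyncio

    async def waiter(evt):
        await evt.wait()            # blocks until the event is set
        print("released")

    async def main():
        evt = uasyncio.Event()
        uasyncio.create_task(waiter(evt))
        await uasyncio.sleep_ms(500)
        evt.set()                   # wakes any tasks waiting on the event
        await uasyncio.sleep_ms(0)  # yield so the waiter can run

    uasyncio.run(main())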
class Lock
----------
.. class:: Lock()
Create a new lock which can be used to coordinate tasks. Locks start in
the unlocked state.
In addition to the methods below, locks can be used in an ``async with`` statement.
.. method:: Lock.locked()
Returns ``True`` if the lock is locked, otherwise ``False``.
.. method:: Lock.acquire()
Wait for the lock to be in the unlocked state and then lock it in an atomic
way. Only one task can acquire the lock at any one time.
This is a coroutine.
.. method:: Lock.release()
Release the lock. If any tasks are waiting on the lock then the next one in the
queue is scheduled to run and the lock remains locked. Otherwise, no tasks are
waiting and the lock becomes unlocked.
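For example, guarding a shared resource with ``async with`` (a sketch)::

    import uasyncio

    lock = uasyncio.Lock()

    async def worker(name):
        async with lock:            # acquired on entry, released on exit
            print(name, "has the lock")
            await uasyncio.sleep_ms(100)

    async def main():
        await uasyncio.gather(worker("a"), worker("b"))

    uasyncio.run(main())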
TCP stream connections
----------------------
.. function:: open_connection(host, port)
Open a TCP connection to the given *host* and *port*. The *host* address will be
resolved using `socket.getaddrinfo`, which is currently a blocking call.
Returns a pair of streams: a reader and a writer stream.
Will raise a socket-specific ``OSError`` if the host could not be resolved or if
the connection could not be made.
This is a coroutine.
.. function:: start_server(callback, host, port, backlog=5)
Start a TCP server on the given *host* and *port*. The *callback* will be
called with incoming, accepted connections, and be passed 2 arguments: reader
and writer streams for the connection.
Returns a `Server` object.
This is a coroutine.
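A minimal echo sketch combining these functions (host, port and payload are
illustrative only)::

    import uasyncio

    async def handle(reader, writer):
        data = await reader.readline()
        writer.write(data)            # echo the line back
        await writer.drain()
        writer.close()
        await writer.wait_closed()

    async def main():
        await uasyncio.start_server(handle, "0.0.0.0", 8080)
        reader, writer = await uasyncio.open_connection("127.0.0.1", 8080)
        writer.write(b"hello\n")
        await writer.drain()
        print(await reader.readline())
        writer.close()
        await writer.wait_closed()

    uasyncio.run(main())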
.. class:: Stream()
This represents a TCP stream connection. To minimise code this class implements
both a reader and a writer, and both ``StreamReader`` and ``StreamWriter`` alias to
this class.
.. method:: Stream.get_extra_info(v)
Get extra information about the stream, given by *v*. The valid values for *v* are:
``peername``.
.. method:: Stream.close()
Close the stream.
.. method:: Stream.wait_closed()
Wait for the stream to close.
This is a coroutine.
.. method:: Stream.read(n)
Read up to *n* bytes and return them.
This is a coroutine.
.. method:: Stream.readinto(buf)
Read up to n bytes into *buf* with n being equal to the length of *buf*.
Return the number of bytes read into *buf*.
This is a coroutine, and a MicroPython extension.
.. method:: Stream.readexactly(n)
Read exactly *n* bytes and return them as a bytes object.
Raises an ``EOFError`` exception if the stream ends before reading *n* bytes.
This is a coroutine.
.. method:: Stream.readline()
Read a line and return it.
This is a coroutine.
.. method:: Stream.write(buf)
Accumulate *buf* into the output buffer. The data is only flushed when
`Stream.drain` is called. It is recommended to call `Stream.drain` immediately
after calling this function.
.. method:: Stream.drain()
Drain (write) all buffered output data out to the stream.
This is a coroutine.
.. class:: Server()
This represents the server class returned from `start_server`. It can be used
in an ``async with`` statement to close the server upon exit.
.. method:: Server.close()
Close the server.
.. method:: Server.wait_closed()
Wait for the server to close.
This is a coroutine.
Event Loop
----------
.. function:: get_event_loop()
Return the event loop used to schedule and run tasks. See `Loop`.
.. function:: new_event_loop()
Reset the event loop and return it.
Note: since MicroPython only has a single event loop, this function just
resets the loop's state; it does not create a new one.
.. class:: Loop()
This represents the object which schedules and runs tasks. It cannot be
created; use `get_event_loop` instead.
.. method:: Loop.create_task(coro)
Create a task from the given *coro* and return the new `Task` object.
.. method:: Loop.run_forever()
Run the event loop until `stop()` is called.
.. method:: Loop.run_until_complete(awaitable)
Run the given *awaitable* until it completes. If *awaitable* is not a task
then it will be promoted to one.
.. method:: Loop.stop()
Stop the event loop.
.. method:: Loop.close()
Close the event loop.
.. method:: Loop.set_exception_handler(handler)
Set the exception handler to call when a Task raises an exception that is not
caught. The *handler* should accept two arguments: ``(loop, context)``.
.. method:: Loop.get_exception_handler()
Get the current exception handler. Returns the handler, or ``None`` if no
custom handler is set.
.. method:: Loop.default_exception_handler(context)
The default exception handler that is called.
.. method:: Loop.call_exception_handler(context)
Call the current exception handler. The argument *context* is passed through and
is a dictionary containing keys: ``'message'``, ``'exception'``, ``'future'``.
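A brief sketch of installing a custom exception handler (illustrative only)::

    import uasyncio

    def handler(loop, context):
        # context carries 'message', 'exception' and 'future' entries
        print("uncaught:", context["exception"])

    async def boom():
        raise ValueError("oops")

    loop = uasyncio.get_event_loop()
    loop.set_exception_handler(handler)
    loop.create_task(boom())
    loop.run_until_complete(uasyncio.sleep_ms(100))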

View File

@ -1,14 +1,24 @@
:mod:`builtins` -- builtin functions and exceptions :mod:`builtins` -- builtin functions and exceptions
=================================================== ===================================================
.. module:: builtins
:synopsis: builtin Python functions
All builtin functions and exceptions are described here. They are also All builtin functions and exceptions are described here. They are also
available via ``builtins`` module. available via the ``builtins`` module.
For more information about built-ins, see the following CPython documentation:
* `Builtin CPython Functions <https://docs.python.org/3/library/functions.html>`_
* `Builtin CPython Exceptions <https://docs.python.org/3/library/exceptions.html>`_
* `Builtin CPython Constants <https://docs.python.org/3/library/constants.html>`_
.. note:: Not all of these functions, types, exceptions, and constants are turned
on in all CircuitPython ports, for space reasons.
Functions and types Functions and types
------------------- -------------------
Not all of these functions and types are turned on in all CircuitPython ports, for space reasons.
.. function:: abs() .. function:: abs()
.. function:: all() .. function:: all()
@ -160,20 +170,34 @@ Not all of these functions and types are turned on in all CircuitPython ports, f
Exceptions Exceptions
---------- ----------
.. exception:: ArithmeticError
.. exception:: AssertionError .. exception:: AssertionError
.. exception:: AttributeError .. exception:: AttributeError
.. exception:: BaseException
.. exception:: BrokenPipeError
.. exception:: ConnectionError
.. exception:: EOFError
.. exception:: Exception .. exception:: Exception
.. exception:: ImportError .. exception:: ImportError
.. exception:: IndentationError
.. exception:: IndexError .. exception:: IndexError
.. exception:: KeyboardInterrupt .. exception:: KeyboardInterrupt
.. exception:: KeyError .. exception:: KeyError
.. exception:: LookupError
.. exception:: MemoryError .. exception:: MemoryError
.. exception:: NameError .. exception:: NameError
@ -182,24 +206,37 @@ Exceptions
.. exception:: OSError .. exception:: OSError
.. exception:: OverflowError
.. exception:: RuntimeError .. exception:: RuntimeError
.. exception:: ReloadException .. exception:: ReloadException
`ReloadException` is used internally to deal with soft restarts. `ReloadException` is used internally to deal with soft restarts.
Not a part of the CPython standard library
.. exception:: StopAsyncIteration
.. exception:: StopIteration .. exception:: StopIteration
.. exception:: SyntaxError .. exception:: SyntaxError
.. exception:: SystemExit .. exception:: SystemExit
|see_cpython| :py:class:`cpython:SystemExit`. .. exception:: TimeoutError
.. exception:: TypeError .. exception:: TypeError
|see_cpython| :py:class:`cpython:TypeError`. .. exception:: UnicodeError
.. exception:: ValueError .. exception:: ValueError
.. exception:: ZeroDivisionError .. exception:: ZeroDivisionError
Constants
---------
.. data:: Ellipsis
.. data:: NotImplemented

View File

@ -28,7 +28,7 @@ Classes
- The optional *flags* can be 1 to check for overflow when adding items. - The optional *flags* can be 1 to check for overflow when adding items.
As well as supporting `bool` and `len`, deque objects have the following As well as supporting ``bool`` and ``len``, deque objects have the following
methods: methods:
.. method:: deque.append(x) .. method:: deque.append(x)

View File

@ -1,59 +0,0 @@
:mod:`hashlib` -- hashing algorithms
=====================================
.. include:: ../templates/unsupported_in_circuitpython.inc
.. module:: hashlib
:synopsis: hashing algorithms
|see_cpython_module| :mod:`cpython:hashlib`.
This module implements binary data hashing algorithms. The exact inventory
of available algorithms depends on the board. Among the algorithms which may
be implemented:
* SHA256 - The current generation, modern hashing algorithm (of the SHA2 series).
It is suitable for cryptographically-secure purposes. It is included in the
MicroPython core, and any board is recommended to provide this unless
it has particular code size constraints.
* SHA1 - A previous generation algorithm. Not recommended for new usages,
but SHA1 is a part of a number of Internet standards and existing
applications, so boards targeting network connectivity and
interoperability will try to provide this.
* MD5 - A legacy algorithm, not considered cryptographically secure. Only
selected boards, targeting interoperability with legacy applications,
will offer this.
Constructors
------------
.. class:: hashlib.sha256([data])
Create an SHA256 hasher object and optionally feed ``data`` into it.
.. class:: hashlib.sha1([data])
Create an SHA1 hasher object and optionally feed ``data`` into it.
.. class:: hashlib.md5([data])
Create an MD5 hasher object and optionally feed ``data`` into it.
Methods
-------
.. method:: hash.update(data)
Feed more binary data into hash.
.. method:: hash.digest()
Return hash for all data passed through hash, as a bytes object. After this
method is called, more data cannot be fed into the hash any longer.
.. method:: hash.hexdigest()
This method is NOT implemented. Use ``binascii.hexlify(hash.digest())``
to achieve a similar effect.
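A short usage sketch, following the note above about ``hexdigest``::

    import hashlib
    import binascii

    h = hashlib.sha256(b"hello ")
    h.update(b"world")               # data can be fed incrementally
    digest = h.digest()              # 32-byte bytes object
    print(binascii.hexlify(digest))  # hex form, since hexdigest() is unavailable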

View File

@ -19,7 +19,7 @@ limited flash memory, usually on non-Express builds:
``binascii``, ``errno``, ``json``, ``re``. ``binascii``, ``errno``, ``json``, ``re``.
These libraries are not currently enabled in any CircuitPython build, but may be in the future: These libraries are not currently enabled in any CircuitPython build, but may be in the future:
``ctypes``, ``hashlib``, ``zlib``. ``ctypes``
.. toctree:: .. toctree::
:maxdepth: 1 :maxdepth: 1
@ -31,15 +31,12 @@ These libraries are not currently enabled in any CircuitPython build, but may be
collections.rst collections.rst
errno.rst errno.rst
gc.rst gc.rst
hashlib.rst
io.rst io.rst
json.rst json.rst
re.rst re.rst
sys.rst sys.rst
asyncio.rst
ctypes.rst ctypes.rst
select.rst select.rst
zlib.rst
Omitted functions in the ``string`` library Omitted functions in the ``string`` library
------------------------------------------- -------------------------------------------

View File

@ -76,7 +76,7 @@ Functions
.. function:: heap_locked() .. function:: heap_locked()
Lock or unlock the heap. When locked no memory allocation can occur and a Lock or unlock the heap. When locked no memory allocation can occur and a
`MemoryError` will be raised if any heap allocation is attempted. ``MemoryError`` will be raised if any heap allocation is attempted.
`heap_locked()` returns a true value if the heap is currently locked. `heap_locked()` returns a true value if the heap is currently locked.
These functions can be nested, ie `heap_lock()` can be called multiple times These functions can be nested, ie `heap_lock()` can be called multiple times

View File

@ -1,40 +0,0 @@
:mod:`zlib` -- zlib decompression
=================================
.. include:: ../templates/unsupported_in_circuitpython.inc
.. module:: zlib
:synopsis: zlib decompression
|see_cpython_module| :mod:`cpython:zlib`.
This module allows decompressing binary data compressed with the
`DEFLATE algorithm <https://en.wikipedia.org/wiki/DEFLATE>`_
(commonly used in the zlib library and the gzip archiver). Compression
is not yet implemented.
Functions
---------
.. function:: decompress(data, wbits=0, bufsize=0, /)
Return decompressed *data* as bytes. *wbits* is the DEFLATE dictionary window
size used during compression (8-15; the dictionary size is a power of 2 of
that value). Additionally, if the value is positive, *data* is assumed to be a
zlib stream (with a zlib header). Otherwise, if it's negative, it's assumed
to be a raw DEFLATE stream. The *bufsize* parameter is for compatibility with
CPython and is ignored.
.. class:: DecompIO(stream, wbits=0, /)
Create a ``stream`` wrapper which allows transparent decompression of
compressed data in another *stream*. This allows processing compressed
streams with data larger than the available heap size. In addition to the
values described in :func:`decompress`, *wbits* may take values
24..31 (16 + 8..15), meaning that the input stream has a gzip header.
.. admonition:: Difference to CPython
:class: attention
This class is a MicroPython extension. It's included on a provisional
basis and may be changed considerably or removed in later versions.
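A small sketch of both APIs (the byte string below is ``b"hello"`` compressed
with CPython's ``zlib.compress``)::

    import io
    import zlib

    data = b"x\x9c\xcbH\xcd\xc9\xc9\x07\x00\x06,\x02\x15"

    print(zlib.decompress(data))               # b'hello'

    # Stream-oriented decompression of the same payload.
    stream = zlib.DecompIO(io.BytesIO(data))
    print(stream.read())                       # b'hello'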

View File

@ -71,7 +71,7 @@ as a natural "TODO" list. An example minimal build list is shown below:
CIRCUITPY_SDCARDIO = 0 CIRCUITPY_SDCARDIO = 0
CIRCUITPY_FRAMEBUFFERIO = 0 CIRCUITPY_FRAMEBUFFERIO = 0
CIRCUITPY_FREQUENCYIO = 0 CIRCUITPY_FREQUENCYIO = 0
CIRCUITPY_I2CPERIPHERAL = 0 CIRCUITPY_I2CTARGET = 0
# Requires SPI, PulseIO (stub ok): # Requires SPI, PulseIO (stub ok):
CIRCUITPY_DISPLAYIO = 0 CIRCUITPY_DISPLAYIO = 0
@ -79,8 +79,6 @@ as a natural "TODO" list. An example minimal build list is shown below:
# any port once their prerequisites in common-hal are complete. # any port once their prerequisites in common-hal are complete.
# Requires DigitalIO: # Requires DigitalIO:
CIRCUITPY_BITBANGIO = 0 CIRCUITPY_BITBANGIO = 0
# Requires DigitalIO
CIRCUITPY_GAMEPADSHIFT = 0
# Requires neopixel_write or SPI (dotstar) # Requires neopixel_write or SPI (dotstar)
CIRCUITPY_PIXELBUF = 0 CIRCUITPY_PIXELBUF = 0
# Requires OS # Requires OS

View File

@ -45,12 +45,10 @@ shared-bindings/audiomp3/__init__.rst shared-bindings/audiomp3/
shared-bindings/audiopwmio/PWMAudioOut.rst shared-bindings/audiopwmio/#audiopwmio.PWMAudioOut shared-bindings/audiopwmio/PWMAudioOut.rst shared-bindings/audiopwmio/#audiopwmio.PWMAudioOut
shared-bindings/audiopwmio/__init__.rst shared-bindings/audiopwmio/ shared-bindings/audiopwmio/__init__.rst shared-bindings/audiopwmio/
shared-bindings/bitbangio/I2C.rst shared-bindings/bitbangio/#bitbangio.I2C shared-bindings/bitbangio/I2C.rst shared-bindings/bitbangio/#bitbangio.I2C
shared-bindings/bitbangio/OneWire.rst shared-bindings/bitbangio/#bitbangio.OneWire
shared-bindings/bitbangio/SPI.rst shared-bindings/bitbangio/#bitbangio.SPI shared-bindings/bitbangio/SPI.rst shared-bindings/bitbangio/#bitbangio.SPI
shared-bindings/bitbangio/__init__.rst shared-bindings/bitbangio/ shared-bindings/bitbangio/__init__.rst shared-bindings/bitbangio/
shared-bindings/board/__init__.rst shared-bindings/board/ shared-bindings/board/__init__.rst shared-bindings/board/
shared-bindings/busio/I2C.rst shared-bindings/busio/#busio.I2C shared-bindings/busio/I2C.rst shared-bindings/busio/#busio.I2C
shared-bindings/busio/OneWire.rst shared-bindings/busio/#busio.OneWire
shared-bindings/busio/Parity.rst shared-bindings/busio/#busio.Parity shared-bindings/busio/Parity.rst shared-bindings/busio/#busio.Parity
shared-bindings/busio/SPI.rst shared-bindings/busio/#busio.SPI shared-bindings/busio/SPI.rst shared-bindings/busio/#busio.SPI
shared-bindings/busio/UART.rst shared-bindings/busio/#busio.UART shared-bindings/busio/UART.rst shared-bindings/busio/#busio.UART
@ -82,10 +80,6 @@ shared-bindings/framebufferio/FramebufferDisplay.rst shared-bindings/framebuffer
shared-bindings/framebufferio/__init__.rst shared-bindings/framebufferio/ shared-bindings/framebufferio/__init__.rst shared-bindings/framebufferio/
shared-bindings/frequencyio/FrequencyIn.rst shared-bindings/frequencyio/#frequencyio.FrequencyIn shared-bindings/frequencyio/FrequencyIn.rst shared-bindings/frequencyio/#frequencyio.FrequencyIn
shared-bindings/frequencyio/__init__.rst shared-bindings/frequencyio/ shared-bindings/frequencyio/__init__.rst shared-bindings/frequencyio/
shared-bindings/gamepad/GamePad.rst shared-bindings/gamepad/#gamepad.GamePad
shared-bindings/gamepad/__init__.rst shared-bindings/gamepad/
shared-bindings/gamepadshift/GamePadShift.rst shared-bindings/gamepadshift/#gamepadshift.GamePadShift
shared-bindings/gamepadshift/__init__.rst shared-bindings/gamepadshift/
shared-bindings/gnss/__init__.rst shared-bindings/gnss/ shared-bindings/gnss/__init__.rst shared-bindings/gnss/
shared-bindings/i2cperipheral/__init__.rst shared-bindings/i2cperipheral/ shared-bindings/i2cperipheral/__init__.rst shared-bindings/i2cperipheral/
shared-bindings/i2csecondary/__init__.rst shared-bindings/i2csecondary/ shared-bindings/i2csecondary/__init__.rst shared-bindings/i2csecondary/
@ -96,11 +90,11 @@ shared-bindings/microcontroller/Pin.rst shared-bindings/microcontroller/#microco
shared-bindings/microcontroller/Processor.rst shared-bindings/microcontroller/#microcontroller.Processor shared-bindings/microcontroller/Processor.rst shared-bindings/microcontroller/#microcontroller.Processor
shared-bindings/microcontroller/RunMode.rst shared-bindings/microcontroller/#microcontroller.RunMode shared-bindings/microcontroller/RunMode.rst shared-bindings/microcontroller/#microcontroller.RunMode
shared-bindings/microcontroller/__init__.rst shared-bindings/microcontroller/ shared-bindings/microcontroller/__init__.rst shared-bindings/microcontroller/
shared-bindings/multiterminal/__init__.rst shared-bindings/multiterminal/
shared-bindings/neopixel_write/__init__.rst shared-bindings/neopixel_write/ shared-bindings/neopixel_write/__init__.rst shared-bindings/neopixel_write/
shared-bindings/network/__init__.rst shared-bindings/network/ shared-bindings/network/__init__.rst shared-bindings/network/
shared-bindings/nvm/ByteArray.rst shared-bindings/nvm/#nvm.ByteArray shared-bindings/nvm/ByteArray.rst shared-bindings/nvm/#nvm.ByteArray
shared-bindings/nvm/__init__.rst shared-bindings/nvm/ shared-bindings/nvm/__init__.rst shared-bindings/nvm/
shared-bindings/onewireio/OneWire.rst shared-bindings/onewireio/#onewireio.OneWire
shared-bindings/os/__init__.rst shared-bindings/os/ shared-bindings/os/__init__.rst shared-bindings/os/
shared-bindings/protomatter/__init__.rst shared-bindings/protomatter/ shared-bindings/protomatter/__init__.rst shared-bindings/protomatter/
shared-bindings/ps2io/Ps2.rst shared-bindings/ps2io/#ps2io.Ps2 shared-bindings/ps2io/Ps2.rst shared-bindings/ps2io/#ps2io.Ps2

View File

@ -27,12 +27,24 @@ import pathlib
import re import re
import subprocess import subprocess
import sys import sys
import functools
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor
SUPPORTED_PORTS = ['atmel-samd', 'broadcom', 'cxd56', 'espressif', 'litex', 'mimxrt10xx', 'nrf', 'raspberrypi', 'stm'] SUPPORTED_PORTS = [
"atmel-samd",
"broadcom",
"cxd56",
"espressif",
"litex",
"mimxrt10xx",
"nrf",
"raspberrypi",
"silabs",
"stm",
]
aliases_by_board = { ALIASES_BY_BOARD = {
"circuitplayground_express": [ "circuitplayground_express": [
"circuitplayground_express_4h", "circuitplayground_express_4h",
"circuitplayground_express_digikey_pycon2019", "circuitplayground_express_digikey_pycon2019",
@ -40,49 +52,104 @@ aliases_by_board = {
"pybadge": ["edgebadge"], "pybadge": ["edgebadge"],
"pyportal": ["pyportal_pynt"], "pyportal": ["pyportal_pynt"],
"gemma_m0": ["gemma_m0_pycon2018"], "gemma_m0": ["gemma_m0_pycon2018"],
"pewpew10": ["pewpew13"],
} }
aliases_brand_names = { ALIASES_BRAND_NAMES = {
"circuitplayground_express_4h": "circuitplayground_express_4h": "Adafruit Circuit Playground Express 4-H",
"Adafruit Circuit Playground Express 4-H", "circuitplayground_express_digikey_pycon2019": "Circuit Playground Express Digi-Key PyCon 2019",
"circuitplayground_express_digikey_pycon2019": "edgebadge": "Adafruit EdgeBadge",
"Circuit Playground Express Digi-Key PyCon 2019", "pyportal_pynt": "Adafruit PyPortal Pynt",
"edgebadge": "gemma_m0_pycon2018": "Adafruit Gemma M0 PyCon 2018",
"Adafruit EdgeBadge",
"pyportal_pynt":
"Adafruit PyPortal Pynt",
"gemma_m0_pycon2018":
"Adafruit Gemma M0 PyCon 2018",
"pewpew13":
"PewPew 13",
} }
additional_modules = { ADDITIONAL_MODULES = {
"fontio": "CIRCUITPY_DISPLAYIO", "_asyncio": "MICROPY_PY_UASYNCIO",
"terminalio": "CIRCUITPY_DISPLAYIO",
"adafruit_bus_device": "CIRCUITPY_BUSDEVICE", "adafruit_bus_device": "CIRCUITPY_BUSDEVICE",
"adafruit_pixelbuf": "CIRCUITPY_PIXELBUF" "adafruit_pixelbuf": "CIRCUITPY_PIXELBUF",
"array": "CIRCUITPY_ARRAY",
# always available, so depend on something that's always 1.
"builtins": "CIRCUITPY",
"collections": "CIRCUITPY_COLLECTIONS",
"fontio": "CIRCUITPY_DISPLAYIO",
"io": "CIRCUITPY_IO",
"select": "MICROPY_PY_USELECT_SELECT",
"terminalio": "CIRCUITPY_DISPLAYIO",
"sys": "CIRCUITPY_SYS",
"usb": "CIRCUITPY_USB_HOST",
} }
MODULES_NOT_IN_BINDINGS = [
"_asyncio",
"array",
"binascii",
"builtins",
"collections",
"errno",
"json",
"re",
"select",
"sys",
"ulab",
]
FROZEN_EXCLUDES = ["examples", "docs", "tests", "utils", "conf.py", "setup.py"]
"""Files and dirs at the root of a frozen directory that should be ignored.
This is the same list as in the preprocess_frozen_modules script."""
repository_urls = {}
"""Cache of repository URLs for frozen modules."""
root_dir = pathlib.Path(__file__).resolve().parent.parent
def get_circuitpython_root_dir(): def get_circuitpython_root_dir():
""" The path to the root './circuitpython' directory """The path to the root './circuitpython' directory."""
"""
file_path = pathlib.Path(__file__).resolve()
root_dir = file_path.parent.parent
return root_dir return root_dir
def get_shared_bindings():
""" Get a list of modules in shared-bindings based on folder names def get_bindings():
"""Get a list of modules in shared-bindings and ports/*/bindings based on folder names."""
shared_bindings_modules = [
module.name
for module in (get_circuitpython_root_dir() / "shared-bindings").iterdir()
if module.is_dir()
]
bindings_modules = []
for d in get_circuitpython_root_dir().glob("ports/*/bindings"):
bindings_modules.extend(module.name for module in d.iterdir() if d.is_dir())
return shared_bindings_modules + bindings_modules + MODULES_NOT_IN_BINDINGS
def get_board_mapping():
""" """
shared_bindings_dir = get_circuitpython_root_dir() / "shared-bindings" Compiles the list of boards from the directories, with aliases and mapping
return [item.name for item in shared_bindings_dir.iterdir()] + ["binascii", "errno", "json", "re", "ulab"] to the port.
"""
boards = {}
for port in SUPPORTED_PORTS:
board_path = root_dir / "ports" / port / "boards"
for board_path in os.scandir(board_path):
if board_path.is_dir():
board_files = os.listdir(board_path.path)
board_id = board_path.name
aliases = ALIASES_BY_BOARD.get(board_path.name, [])
boards[board_id] = {
"port": port,
"download_count": 0,
"aliases": aliases,
}
for alias in aliases:
boards[alias] = {
"port": port,
"download_count": 0,
"alias": True,
"aliases": [],
}
return boards
def read_mpconfig(): def read_mpconfig():
""" Open 'circuitpy_mpconfig.mk' and return the contents. """Open 'circuitpy_mpconfig.mk' and return the contents."""
"""
configs = [] configs = []
cpy_mpcfg = get_circuitpython_root_dir() / "py" / "circuitpy_mpconfig.mk" cpy_mpcfg = get_circuitpython_root_dir() / "py" / "circuitpy_mpconfig.mk"
with open(cpy_mpcfg) as mpconfig: with open(cpy_mpcfg) as mpconfig:
@ -99,15 +166,15 @@ def build_module_map():
""" """
base = dict() base = dict()
modules = get_shared_bindings() modules = get_bindings()
configs = read_mpconfig() configs = read_mpconfig()
full_build = False full_build = False
for module in modules: for module in modules:
full_name = module full_name = module
if module in additional_modules: if module in ADDITIONAL_MODULES:
search_identifier = additional_modules[module] search_identifier = ADDITIONAL_MODULES[module]
else: else:
search_identifier = 'CIRCUITPY_'+module.lstrip("_").upper() search_identifier = "CIRCUITPY_" + module.lstrip("_").upper()
re_pattern = f"{re.escape(search_identifier)}\s*\??=\s*(.+)" re_pattern = f"{re.escape(search_identifier)}\s*\??=\s*(.+)"
find_config = re.findall(re_pattern, configs) find_config = re.findall(re_pattern, configs)
if not find_config: if not find_config:
@ -130,6 +197,7 @@ def build_module_map():
return base return base
def get_settings_from_makefile(port_dir, board_name): def get_settings_from_makefile(port_dir, board_name):
"""Invoke make in a mode which prints the database, then parse it for """Invoke make in a mode which prints the database, then parse it for
settings. settings.
@ -143,7 +211,7 @@ def get_settings_from_makefile(port_dir, board_name):
encoding="utf-8", encoding="utf-8",
errors="replace", errors="replace",
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE stderr=subprocess.PIPE,
) )
# Make signals errors with exit status 2; 0 and 1 are "non-error" statuses # Make signals errors with exit status 2; 0 and 1 are "non-error" statuses
if contents.returncode not in (0, 1): if contents.returncode not in (0, 1):
@ -154,32 +222,103 @@ def get_settings_from_makefile(port_dir, board_name):
raise RuntimeError(error_msg) raise RuntimeError(error_msg)
settings = {} settings = {}
for line in contents.stdout.split('\n'): for line in contents.stdout.split("\n"):
# Handle both = and := definitions. # Handle both = and := definitions.
m = re.match(r'^([A-Z][A-Z0-9_]*) :?= (.*)$', line) m = re.match(r"^([A-Z][A-Z0-9_]*) :?= (.*)$", line)
if m: if m:
settings[m.group(1)] = m.group(2) settings[m.group(1)] = m.group(2)
return settings return settings
def lookup_setting(settings, key, default=''):
def get_repository_url(directory):
if directory in repository_urls:
return repository_urls[directory]
readme = None
for readme_path in (
os.path.join(directory, "README.rst"),
os.path.join(os.path.dirname(directory), "README.rst"),
):
if os.path.exists(readme_path):
readme = readme_path
break
path = None
if readme:
with open(readme, "r") as fp:
for line in fp.readlines():
if m := re.match(
"\s+:target:\s+(http\S+(docs.circuitpython|readthedocs)\S+)\s*",
line,
):
path = m.group(1)
break
if m := re.search("<(http[^>]+)>", line):
path = m.group(1)
break
if path is None:
contents = subprocess.run(
["git", "remote", "get-url", "origin"],
encoding="utf-8",
errors="replace",
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=directory,
)
path = contents.stdout.strip()
repository_urls[directory] = path
return path
def frozen_modules_from_dirs(frozen_mpy_dirs, withurl):
"""
Go through the list of frozen directories and extract the python modules.
Paths are of the type:
$(TOP)/frozen/Adafruit_CircuitPython_CircuitPlayground
$(TOP)/frozen/circuitpython-stage/meowbit
Python modules are at the root of the path, and are python files or directories
containing python files. Except the ones in the FROZEN_EXCLUDES list.
"""
frozen_modules = []
for frozen_path in filter(lambda x: x, frozen_mpy_dirs.split(" ")):
source_dir = get_circuitpython_root_dir() / frozen_path[7:]
url_repository = get_repository_url(source_dir)
for sub in source_dir.glob("*"):
if sub.name in FROZEN_EXCLUDES:
continue
if sub.name.endswith(".py"):
if withurl:
frozen_modules.append((sub.name[:-3], url_repository))
else:
frozen_modules.append(sub.name[:-3])
continue
if next(sub.glob("**/*.py"), None): # tests if not empty
if withurl:
frozen_modules.append((sub.name, url_repository))
else:
frozen_modules.append(sub.name)
return frozen_modules
def lookup_setting(settings, key, default=""):
while True: while True:
value = settings.get(key, default) value = settings.get(key, default)
if not value.startswith('$'): if not value.startswith("$"):
break break
key = value[2:-1] key = value[2:-1]
return value return value
@functools.cache
def all_ports_all_boards(ports=SUPPORTED_PORTS): def all_ports_all_boards(ports=SUPPORTED_PORTS):
for port in ports: for port in ports:
port_dir = get_circuitpython_root_dir() / "ports" / port port_dir = get_circuitpython_root_dir() / "ports" / port
for entry in (port_dir / "boards").iterdir(): for entry in (port_dir / "boards").iterdir():
if not entry.is_dir(): if not entry.is_dir():
continue continue
yield (port, entry) yield (port, entry)
def support_matrix_by_board(use_branded_name=True):
def support_matrix_by_board(use_branded_name=True, withurl=True):
"""Compiles a list of the available core modules available for each """Compiles a list of the available core modules available for each
board. board.
""" """
@ -193,8 +332,9 @@ def support_matrix_by_board(use_branded_name=True):
if use_branded_name: if use_branded_name:
with open(entry / "mpconfigboard.h") as get_name: with open(entry / "mpconfigboard.h") as get_name:
board_contents = get_name.read() board_contents = get_name.read()
board_name_re = re.search(r"(?<=MICROPY_HW_BOARD_NAME)\s+(.+)", board_name_re = re.search(
board_contents) r"(?<=MICROPY_HW_BOARD_NAME)\s+(.+)", board_contents
)
if board_name_re: if board_name_re:
board_name = board_name_re.group(1).strip('"') board_name = board_name_re.group(1).strip('"')
else: else:
@ -202,31 +342,69 @@ def support_matrix_by_board(use_branded_name=True):
board_modules = [] board_modules = []
for module in base: for module in base:
key = base[module]['key'] key = base[module]["key"]
if int(lookup_setting(settings, key, '0')): if int(lookup_setting(settings, key, "0")):
board_modules.append(base[module]['name']) board_modules.append(base[module]["name"])
board_modules.sort() board_modules.sort()
if "CIRCUITPY_BUILD_EXTENSIONS" in settings:
board_extensions = [
extension.strip()
for extension in settings["CIRCUITPY_BUILD_EXTENSIONS"].split(",")
]
else:
raise OSError(f"Board extensions undefined: {board_name}.")
frozen_modules = []
if "FROZEN_MPY_DIRS" in settings:
frozen_modules = frozen_modules_from_dirs(
settings["FROZEN_MPY_DIRS"], withurl
)
if frozen_modules:
frozen_modules.sort()
# generate alias boards too # generate alias boards too
board_matrix = [(board_name, board_modules)] board_matrix = [
if entry.name in aliases_by_board: (
for alias in aliases_by_board[entry.name]: board_name,
{
"modules": board_modules,
"frozen_libraries": frozen_modules,
"extensions": board_extensions,
},
)
]
if entry.name in ALIASES_BY_BOARD:
for alias in ALIASES_BY_BOARD[entry.name]:
if use_branded_name: if use_branded_name:
if alias in aliases_brand_names: if alias in ALIASES_BRAND_NAMES:
alias = aliases_brand_names[alias] alias = ALIASES_BRAND_NAMES[alias]
else: else:
alias = alias.replace("_", " ").title() alias = alias.replace("_", " ").title()
board_matrix.append( (alias, board_modules) ) board_matrix.append(
(
alias,
{
"modules": board_modules,
"frozen_libraries": frozen_modules,
"extensions": board_extensions,
},
)
)
return board_matrix # this is now a list of (board,modules) return board_matrix # this is now a list of (board,modules)
executor = ThreadPoolExecutor(max_workers=os.cpu_count()) executor = ThreadPoolExecutor(max_workers=os.cpu_count())
mapped_exec = executor.map(support_matrix, all_ports_all_boards()) mapped_exec = executor.map(support_matrix, all_ports_all_boards())
# flatmap with comprehensions # flatmap with comprehensions
boards = dict(sorted([board for matrix in mapped_exec for board in matrix])) boards = dict(
sorted(
[board for matrix in mapped_exec for board in matrix], key=lambda x: x[0]
)
)
# print(json.dumps(boards, indent=2))
return boards return boards
if __name__ == '__main__':
if __name__ == "__main__":
print(json.dumps(support_matrix_by_board(), indent=2)) print(json.dumps(support_matrix_by_board(), indent=2))

View File

@ -7,8 +7,21 @@
right: 10px; right: 10px;
top: 4px; top: 4px;
} }
.support-matrix-table .this_module code,
.support-matrix-table .this_module span { .support-matrix-table .reference.external {
box-sizing: border-box;
font-weight: 700;
color: #404040;
font-family: "SFMono-Regular", Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", Courier, monospace;
padding: 2px 5px;
background: white;
border: 1px solid #e1e4e5;
font-size: 75%;
}
.support-matrix-table .this_module,
.support-matrix-table .this_module.reference.external,
.support-matrix-table .this_module * {
background: black; background: black;
color: white; color: white;
} }

View File

@ -44,14 +44,14 @@ $(() => {
var nvisible = 0; var nvisible = 0;
$(".support-matrix-table tbody tr").each( (index,item) => { $(".support-matrix-table tbody tr").each( (index,item) => {
var name = $(item).find("td:first-child p").html(); var name = $(item).find("td:first-child p").html();
var modules = $(item).find("a.reference.internal"); var modules = $(item).find("code, a.reference.external");
var matching_all = true; var matching_all = true;
// //
list_search.forEach((sstring) => { list_search.forEach((sstring) => {
var matching = (sstring[0] == "-"); var matching = (sstring[0] == "-");
for(var modi = 0; modi < modules.length; ++modi) { for(var modi = 0; modi < modules.length; ++modi) {
module = modules[modi]; module = modules[modi];
var mod_name = module.firstChild.firstChild.textContent; var mod_name = module.firstChild.textContent;
if(sstring[0] == "-") { if(sstring[0] == "-") {
if(mod_name.match(sstring.substr(1))) { if(mod_name.match(sstring.substr(1))) {
matching = false; matching = false;

432
docs/workflows.md Normal file
View File

@ -0,0 +1,432 @@
# Workflows
Workflows are the processes used to 1) manipulate files on the CircuitPython device and 2) interact
with the serial connection to CircuitPython. The serial connection is usually used to access the
REPL.
Starting with CircuitPython 3.x we moved to a USB-only workflow. Prior to that, we used the serial
connection alone to do the whole workflow. In CircuitPython 7.x, a BLE workflow was added with the
advantage of working with mobile devices. CircuitPython 8.x added a web workflow that works over the
local network (usually Wi-Fi) and a web browser. Other clients can also use the Web REST API. Boards
should clearly document which workflows are supported.
Code for workflows lives in `supervisor/shared`.
The workflow APIs are documented here.
## USB
These USB interfaces are enabled by default on boards with USB support. They are usable once the
device has been plugged into a host.
### CIRCUITPY drive
CircuitPython exposes a standard mass storage (MSC) interface to enable file manipulation over a
standard interface. This interface works underneath the file system at the block level so using it
excludes other types of workflows from manipulating the file system at the same time.
### CDC serial
CircuitPython exposes one CDC USB interface for CircuitPython serial. This is a standard serial
USB interface.
TODO: Document how it differentiates itself from the user CDC.
Setting baudrate 1200 and disconnecting will reboot into a bootloader. (Used by Arduino to trigger
a reset into bootloader.)
## BLE
The BLE workflow is available on nRF boards. By default, to prevent malicious access, it is disabled.
To connect to the BLE workflow, press the reset button while the status led blinks blue quickly
after the safe mode blinks. The board will restart and broadcast the file transfer service UUID
(`0xfebb`) along with the board's [Creation IDs](https://github.com/creationid/creators). This
public broadcast is done at a lower transmit level so the devices must be closer. On connection, the
device will need to pair and bond. Once bonded, the device will broadcast whenever disconnected
using a rotating key rather than a static one. Non-bonded devices won't be able to resolve it. After
connection, the central device can discover two default services. One for file transfer and one for
CircuitPython specifically that includes serial characteristics.
To change the default BLE advertising name without (or before) running user code, the desired name
can be put in the `settings.toml` file. The key is `CIRCUITPY_BLE_NAME`. It's limited to approximately
30 characters depending on the port's settings and will be truncated if longer.
### File Transfer API
CircuitPython uses [an open File Transfer API](https://github.com/adafruit/Adafruit_CircuitPython_BLE_File_Transfer)
to enable file system access.
### CircuitPython Service
The base UUID for the CircuitPython service is `ADAFXXXX-4369-7263-7569-7450794686e`. The `XXXX` is
replaced by the four specific digits below. The service itself is `0001`.
#### TX - `0002` / RX - `0003`
These characteristics work just like the Nordic UART Service (NUS) but have different UUIDs to prevent
conflicts with user-created NUS services.
#### Version - `0100`
Read-only characteristic that returns the UTF-8 encoded version string.
## Web
The web workflow depends on adding Wi-Fi credentials to the `settings.toml` file. The keys are
`CIRCUITPY_WIFI_SSID` and `CIRCUITPY_WIFI_PASSWORD`. Once these are defined, CircuitPython will
automatically connect to the network and start the webserver used for the workflow. The webserver
is on port 80 unless overridden by `CIRCUITPY_WEB_API_PORT`. It also enables MDNS. The name
of the board as advertised to the network can be overridden by `CIRCUITPY_WEB_INSTANCE_NAME`.
Here is an example `/settings.toml`:
```bash
# To auto-connect to Wi-Fi
CIRCUITPY_WIFI_SSID="scottswifi"
CIRCUITPY_WIFI_PASSWORD="secretpassword"
# To enable modifying files from the web. Change this too!
# Leave the User field blank in the browser.
CIRCUITPY_WEB_API_PASSWORD="passw0rd"
CIRCUITPY_WEB_API_PORT=80
CIRCUITPY_WEB_INSTANCE_NAME=""
```
MDNS is used to resolve [`circuitpython.local`](http://circuitpython.local) to a device specific
hostname of the form `cpy-XXXXXX.local`. The `XXXXXX` is based on network MAC address. The device
also provides the MDNS service with service type `_circuitpython` and protocol `_tcp`.
### HTTP
The web server is HTTP 1.1 and may use chunked responses so that it doesn't need to precompute
content length.
The API generally consists of an HTTP method such as GET or PUT and a path. Requests and responses
also have headers. Responses will contain a status code and status text such as `404 Not Found`.
This API tries to use standard status codes to encode the status of the various operations. The
[Mozilla Developer Network HTTP docs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP)
are a great reference.
#### Examples
The examples use `curl`, a common command line program for issuing HTTP requests. The examples below
use `circuitpython.local` as the easiest way to work. If you have multiple active devices, you'll
want to use the specific `cpy-XXXXXX.local` version.
The examples also use `passw0rd` as the password placeholder. Replace it with your password before
running the example.
### `/`
The root welcome page links to the file system page and also displays other CircuitPython devices
found using MDNS service discovery. This allows web browsers to find other devices from one. (All
devices will respond to `circuitpython.local` so the device redirected to may vary.)
### CORS
The web server will allow requests from `cpy-XXXXXX.local`, `127.0.0.1`, the device's IP and
`code.circuitpython.org`. (`circuitpython.local` requests will be redirected to `cpy-XXXXXX.local`.)
### File REST API
All file system related APIs are protected by HTTP basic authentication. It is *NOT* secure but will
hopefully prevent some griefing in shared settings. The password is sent unencrypted so do not reuse
a password with something important. The user field is left blank.
The password is taken from `settings.toml` with the key `CIRCUITPY_WEB_API_PASSWORD`. If this is unset, the
server will respond with `403 Forbidden`. When a password is set, but not provided in a request, it
will respond `401 Unauthorized`.
#### `/fs/`
The `/fs/` page will respond with a directory browsing HTML once authenticated. This page is always
gzipped. If the `Accept: application/json` header is provided, then the JSON representation of the
root will be returned.
##### OPTIONS
When requested with the `OPTIONS` method, the server will respond with CORS related headers. Most
aren't needed for API use. They are there for the web browser.
* `Access-Control-Allow-Methods` - Varies with USB state. `GET, OPTIONS` when USB is active. `GET, OPTIONS, PUT, DELETE, MOVE` otherwise.
Example:
```sh
curl -v -u :passw0rd -X OPTIONS -L --location-trusted http://circuitpython.local/fs/
```
#### `/fs/<directory path>/`
Directory paths must end with a /. Otherwise, the path is assumed to be a file.
##### GET
Returns a JSON representation of the directory.
* `200 OK` - Directory exists and JSON returned
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - Missing directory
Returns information about each file in the directory:
* `name` - File name. No trailing `/` on directory names
* `directory` - `true` when a directory. `false` otherwise
* `modified_ns` - File modification time in nanoseconds since January 1st, 1970. May not use full resolution
* `file_size` - File size in bytes. `0` for directories
Example:
```sh
curl -v -u :passw0rd -H "Accept: application/json" -L --location-trusted http://circuitpython.local/fs/lib/hello/
```
```json
[
{
"name": "world.txt",
"directory": false,
"modified_ns": 946934328000000000,
"file_size": 12
}
]
```
##### PUT
Tries to make a directory at the given path. Request body is ignored. The custom `X-Timestamp`
header can provide a timestamp in milliseconds since January 1st, 1970 (to match JavaScript's file
time resolution) used for the directory's modification time. The RTC time will be used otherwise.
Returns:
* `204 No Content` - Directory exists
* `201 Created` - Directory created
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `409 Conflict` - USB is active and preventing file system modification
* `404 Not Found` - Missing parent directory
* `500 Server Error` - Other, unhandled error
Example:
```sh
curl -v -u :passw0rd -X PUT -L --location-trusted http://circuitpython.local/fs/lib/hello/world/
```
##### Move
Moves the directory at the given path to ``X-Destination``. Also known as rename.
The custom `X-Destination` header stores the destination path of the directory.
* `201 Created` - Directory renamed
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - Source directory not found or destination path is missing
* `409 Conflict` - USB is active and preventing file system modification
* `412 Precondition Failed` - The destination path is already in use
Example:
```sh
curl -v -u :passw0rd -X MOVE -H "X-Destination: /fs/lib/hello2/" -L --location-trusted http://circuitpython.local/fs/lib/hello/
```
##### DELETE
Deletes the directory and all of its contents.
* `204 No Content` - Directory and its contents deleted
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - No directory
* `409 Conflict` - USB is active and preventing file system modification
Example:
```sh
curl -v -u :passw0rd -X DELETE -L --location-trusted http://circuitpython.local/fs/lib/hello2/world/
```
#### `/fs/<file path>`
##### PUT
Stores the provided content to the file path.
The custom `X-Timestamp` header can provide a timestamp in milliseconds since January 1st, 1970
(to match JavaScript's file time resolution) used for the file's modification time. The RTC
time will be used otherwise.
Returns:
* `201 Created` - File created and saved
* `204 No Content` - File existed and overwritten
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - Missing parent directory
* `409 Conflict` - USB is active and preventing file system modification
* `413 Payload Too Large` - `Expect` header not sent and file is too large
* `417 Expectation Failed` - `Expect` header sent and file is too large
* `500 Server Error` - Other, unhandled error
If the client sends the `Expect` header, the server will reply with `100 Continue` when ok.
Example:
```sh
echo "Hello world" >> test.txt
curl -v -u :passw0rd -T test.txt -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
```
##### GET
Returns the raw file contents. `Content-Type` will be set based on extension:
* `text/plain` - `.py`, `.txt`
* `text/javascript` - `.js`
* `text/html` - `.html`
* `application/json` - `.json`
* `application/octet-stream` - Everything else
Will return:
* `200 OK` - File exists and file returned
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - Missing file
Example:
```sh
curl -v -u :passw0rd -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
```
##### Move
Moves the file at the given path to the ``X-Destination``. Also known as rename.
The custom `X-Destination` header stores the destination path of the file.
* `201 Created` - File renamed
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - Source file not found or destination path is missing
* `409 Conflict` - USB is active and preventing file system modification
* `412 Precondition Failed` - The destination path is already in use
Example:
```sh
curl -v -u :passw0rd -X MOVE -H "X-Destination: /fs/lib/hello/world2.txt" -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
```
##### DELETE
Deletes the file.
* `204 No Content` - File existed and deleted
* `401 Unauthorized` - Incorrect password
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
* `404 Not Found` - File not found
* `409 Conflict` - USB is active and preventing file system modification
Example:
```sh
curl -v -u :passw0rd -X DELETE -L --location-trusted http://circuitpython.local/fs/lib/hello/world2.txt
```
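Putting the file endpoints together, the same operations can be scripted from any HTTP client. A rough
sketch with Python's `requests` library (hostname, password, and paths are placeholders; using the
device-specific name avoids the `circuitpython.local` redirect, which would otherwise drop the
credentials):

```python
import requests

BASE = "http://cpy-XXXXXX.local"
AUTH = ("", "passw0rd")  # user field is blank

# List a directory as JSON.
listing = requests.get(BASE + "/fs/lib/hello/", auth=AUTH,
                       headers={"Accept": "application/json"})
print(listing.json())

# Create a file, rename it, then delete it.
requests.put(BASE + "/fs/lib/hello/world.txt", data=b"Hello world\n", auth=AUTH)
requests.request("MOVE", BASE + "/fs/lib/hello/world.txt", auth=AUTH,
                 headers={"X-Destination": "/fs/lib/hello/world2.txt"})
requests.delete(BASE + "/fs/lib/hello/world2.txt", auth=AUTH)
```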
### `/cp/`
`/cp/` serves basic info about the CircuitPython device and others discovered through MDNS. It is
not protected by basic auth in case the device is someone else's.
Only `GET` requests are supported and will return `405 Method Not Allowed` otherwise.
#### `/cp/devices.json`
Returns information about other devices found on the network using MDNS.
* `total`: Total MDNS response count. May be more than in `devices` if internal limits were hit.
* `devices`: List of discovered devices.
* `hostname`: MDNS hostname
* `instance_name`: MDNS instance name. Defaults to the human-readable board name.
* `port`: Port of CircuitPython Web API
* `ip`: IP address
Example:
```sh
curl -v -L http://circuitpython.local/cp/devices.json
```
```json
{
"total": 1,
"devices": [
{
"hostname": "cpy-951032",
"instance_name": "Adafruit Feather ESP32-S2 TFT",
"port": 80,
"ip": "192.168.1.235"
}
]
}
```
#### `/cp/serial/`
Serves a basic serial terminal program when a `GET` request is received without the
`Upgrade: websocket` header. Otherwise the socket is upgraded to a WebSocket. See WebSockets below for more detail.
This is an authenticated endpoint in both modes.
#### `/cp/version.json`
Returns information about the device.
* `web_api_version`: Always `1`. This versions the rest of the API and new versions may not be backwards compatible.
* `version`: CircuitPython build version.
* `build_date`: CircuitPython build date.
* `board_name`: Human readable name of the board.
* `mcu_name`: Human readable name of the microcontroller.
* `board_id`: Board id used in code and on circuitpython.org.
* `creator_id`: Creator ID for the board.
* `creation_id`: Creation ID for the board, set by the creator.
* `hostname`: MDNS hostname.
* `port`: Port of CircuitPython Web Service.
* `ip`: IP address of the device.
Example:
```sh
curl -v -L http://circuitpython.local/cp/version.json
```
```json
{
"web_api_version": 1,
"version": "8.0.0-alpha.1-20-ge1d4518a9-dirty",
"build_date": "2022-06-24",
"board_name": "ESP32-S3-USB-OTG-N8",
"mcu_name": "ESP32S3",
"board_id": "espressif_esp32s3_usb_otg_n8",
"creator_id": 12346,
"creation_id": 28683,
"hostname": "cpy-f57ce8",
"port": 80,
"ip": "192.168.1.94"
}
```
#### `/code/`
The `/code/` page returns a small static html page that will pull in and load the full code editor from
[code.circuitpython.org](https://code.circuitpython.org) for a full code editor experience. Because most
of the resources reside online instead of on the device, an active internet connection is required.
### Static files
* `/favicon.ico` - Blinka
* `/directory.js` - JavaScript for `/fs/`
* `/welcome.js` - JavaScript for `/`
### WebSocket
The CircuitPython serial interactions are available over a WebSocket. A WebSocket begins as a
special HTTP request that gets upgraded to a WebSocket. Authentication happens before upgrading.
WebSockets are *not* bare sockets once upgraded. Instead they have their own framing format for data.
CircuitPython can handle PING and CLOSE opcodes. All others are treated as TEXT. Data to
CircuitPython is expected to be masked UTF-8, as the spec requires. Data from CircuitPython to the
client is unmasked. It is also unbuffered so the client will get a variety of frame sizes.
Only one WebSocket at a time is supported.
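As a rough sketch, a client using the Python `websockets` package might look like this (hostname and
password are placeholders; depending on the package version the keyword is `extra_headers` or
`additional_headers`):

```python
import asyncio
import base64
import websockets

async def main():
    token = base64.b64encode(b":passw0rd").decode()  # blank user + web API password
    async with websockets.connect(
        "ws://cpy-XXXXXX.local/cp/serial/",
        extra_headers=[("Authorization", "Basic " + token)],
    ) as ws:
        await ws.send("\r")          # nudge the REPL
        print(await ws.recv())       # REPL output arrives as text frames

asyncio.run(main())
```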

View File

@ -88,7 +88,7 @@ mp_obj_t mpy_init(mp_obj_fun_bc_t *self, size_t n_args, size_t n_kw, mp_obj_t *a
// This must be first, it sets up the globals dict and other things // This must be first, it sets up the globals dict and other things
MP_DYNRUNTIME_INIT_ENTRY MP_DYNRUNTIME_INIT_ENTRY
// Messages can be printed as usualy // Messages can be printed as usually
mp_printf(&mp_plat_print, "initialising module self=%p\n", self); mp_printf(&mp_plat_print, "initialising module self=%p\n", self);
// Make the functions available in the module's namespace // Make the functions available in the module's namespace

View File

@ -33,6 +33,8 @@
#include "shared-bindings/supervisor/__init__.h" #include "shared-bindings/supervisor/__init__.h"
#endif #endif
#include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UASYNCIO #if MICROPY_PY_UASYNCIO
// Used when task cannot be guaranteed to be non-NULL. // Used when task cannot be guaranteed to be non-NULL.
@ -68,24 +70,21 @@ STATIC mp_obj_t task_getiter(mp_obj_t self_in, mp_obj_iter_buf_t *iter_buf);
/******************************************************************************/ /******************************************************************************/
// Ticks for task ordering in pairing heap // Ticks for task ordering in pairing heap
#if !CIRCUITPY || (defined(__unix__) || defined(__APPLE__))
STATIC mp_obj_t ticks(void) {
return MP_OBJ_NEW_SMALL_INT(mp_hal_ticks_ms() & (MICROPY_PY_UTIME_TICKS_PERIOD - 1));
}
STATIC mp_int_t ticks_diff(mp_obj_t t1_in, mp_obj_t t0_in) {
mp_uint_t t0 = MP_OBJ_SMALL_INT_VALUE(t0_in);
mp_uint_t t1 = MP_OBJ_SMALL_INT_VALUE(t1_in);
mp_int_t diff = ((t1 - t0 + MICROPY_PY_UTIME_TICKS_PERIOD / 2) & (MICROPY_PY_UTIME_TICKS_PERIOD - 1))
- MICROPY_PY_UTIME_TICKS_PERIOD / 2;
return diff;
}
#else
#define _TICKS_PERIOD (1lu << 29) #define _TICKS_PERIOD (1lu << 29)
#define _TICKS_MAX (_TICKS_PERIOD - 1) #define _TICKS_MAX (_TICKS_PERIOD - 1)
#define _TICKS_HALFPERIOD (_TICKS_PERIOD >> 1) #define _TICKS_HALFPERIOD (_TICKS_PERIOD >> 1)
#if !CIRCUITPY || (defined(__unix__) || defined(__APPLE__))
STATIC mp_obj_t ticks(void) {
return MP_OBJ_NEW_SMALL_INT(mp_hal_ticks_ms() & _TICKS_MAX);
}
#else
// We don't share the implementation above because our supervisor_ticks_ms
// starts the epoch about 65 seconds before the first overflow (see
// shared-bindings/supervisor/__init__.c). We assume/require that
// supervisor.ticks_ms is picked as the ticks implementation under
// CircuitPython for the Python-coded bits of asyncio.
#define ticks() supervisor_ticks_ms() #define ticks() supervisor_ticks_ms()
#endif
STATIC mp_int_t ticks_diff(mp_obj_t t1_in, mp_obj_t t0_in) { STATIC mp_int_t ticks_diff(mp_obj_t t1_in, mp_obj_t t0_in) {
mp_uint_t t0 = MP_OBJ_SMALL_INT_VALUE(t0_in); mp_uint_t t0 = MP_OBJ_SMALL_INT_VALUE(t0_in);
@ -93,7 +92,6 @@ STATIC mp_int_t ticks_diff(mp_obj_t t1_in, mp_obj_t t0_in) {
mp_int_t diff = ((t1 - t0 + _TICKS_HALFPERIOD) & _TICKS_MAX) - _TICKS_HALFPERIOD; mp_int_t diff = ((t1 - t0 + _TICKS_HALFPERIOD) & _TICKS_MAX) - _TICKS_HALFPERIOD;
return diff; return diff;
} }
#endif
STATIC int task_lt(mp_pairheap_t *n1, mp_pairheap_t *n2) { STATIC int task_lt(mp_pairheap_t *n1, mp_pairheap_t *n2) {
mp_obj_task_t *t1 = (mp_obj_task_t *)n1; mp_obj_task_t *t1 = (mp_obj_task_t *)n1;
@ -300,8 +298,13 @@ STATIC mp_obj_t task_getiter(mp_obj_t self_in, mp_obj_iter_buf_t *iter_buf) {
STATIC mp_obj_t task_iternext(mp_obj_t self_in) { STATIC mp_obj_t task_iternext(mp_obj_t self_in) {
mp_obj_task_t *self = MP_OBJ_TO_PTR(self_in); mp_obj_task_t *self = MP_OBJ_TO_PTR(self_in);
if (TASK_IS_DONE(self)) { if (TASK_IS_DONE(self)) {
if (self->data == mp_const_none) {
// Task finished but has already been sent to the loop's exception handler.
mp_raise_StopIteration(MP_OBJ_NULL);
} else {
// Task finished, raise return value to caller so it can continue. // Task finished, raise return value to caller so it can continue.
nlr_raise(self->data); nlr_raise(self->data);
}
} else { } else {
// Put calling task on waiting queue. // Put calling task on waiting queue.
mp_obj_t cur_task = mp_obj_dict_get(uasyncio_context, MP_OBJ_NEW_QSTR(MP_QSTR_cur_task)); mp_obj_t cur_task = mp_obj_dict_get(uasyncio_context, MP_OBJ_NEW_QSTR(MP_QSTR_cur_task));

View File

@ -11,6 +11,8 @@
#include "py/runtime.h" #include "py/runtime.h"
#include "py/binary.h" #include "py/binary.h"
#include "supervisor/shared/translate/translate.h"
static void check_not_unicode(const mp_obj_t arg) { static void check_not_unicode(const mp_obj_t arg) {
#if MICROPY_CPYTHON_COMPAT #if MICROPY_CPYTHON_COMPAT
if (mp_obj_is_str(arg)) { if (mp_obj_is_str(arg)) {

View File

@ -11,7 +11,7 @@
#include "py/objtuple.h" #include "py/objtuple.h"
#include "py/binary.h" #include "py/binary.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UCTYPES #if MICROPY_PY_UCTYPES

View File

@ -8,7 +8,7 @@
#include "py/runtime.h" #include "py/runtime.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UHASHLIB #if MICROPY_PY_UHASHLIB

View File

@ -6,7 +6,7 @@
#include "py/objlist.h" #include "py/objlist.h"
#include "py/runtime.h" #include "py/runtime.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UHEAPQ #if MICROPY_PY_UHEAPQ

View File

@ -13,7 +13,7 @@
#include "py/runtime.h" #include "py/runtime.h"
#include "py/stream.h" #include "py/stream.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UJSON #if MICROPY_PY_UJSON

View File

@ -9,13 +9,14 @@
#include <stdio.h> #include <stdio.h>
#include "py/ioctl.h" #include "py/stream.h"
#include "py/runtime.h" #include "py/runtime.h"
#include "py/obj.h" #include "py/obj.h"
#include "py/objlist.h" #include "py/objlist.h"
#include "py/stream.h" #include "py/stream.h"
#include "py/mperrno.h" #include "py/mperrno.h"
#include "py/mphal.h" #include "py/mphal.h"
#include "shared/runtime/interrupt_char.h"
// Flags for poll() // Flags for poll()
#define FLAG_ONESHOT (1) #define FLAG_ONESHOT (1)
@ -230,6 +231,9 @@ STATIC mp_uint_t poll_poll_internal(uint n_args, const mp_obj_t *args) {
break; break;
} }
RUN_BACKGROUND_TASKS; RUN_BACKGROUND_TASKS;
if (mp_hal_is_interrupted()) {
return 0;
}
} }
return n_ready; return n_ready;

View File

@ -10,7 +10,7 @@
#include "py/runtime.h" #include "py/runtime.h"
#include "py/smallint.h" #include "py/smallint.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UTIMEQ #if MICROPY_PY_UTIMEQ

View File

@ -10,7 +10,7 @@
#include "py/stream.h" #include "py/stream.h"
#include "py/mperrno.h" #include "py/mperrno.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if MICROPY_PY_UZLIB #if MICROPY_PY_UZLIB

View File

@ -141,6 +141,10 @@ class Task:
def __next__(self): def __next__(self):
if not self.state: if not self.state:
if self.data is None:
# Task finished but has already been sent to the loop's exception handler.
raise StopIteration
else:
# Task finished, raise return value to caller so it can continue. # Task finished, raise return value to caller so it can continue.
raise self.data raise self.data
else: else:

@ -1 +1 @@
Subproject commit 5d01882c41dbc4115bc94f0b61c093d5a6b812b6 Subproject commit f2dd2230c4fdf1aa5c7a160782efdde18e8204bb

View File

@ -20,7 +20,7 @@
#include "extmod/vfs_fat.h" #include "extmod/vfs_fat.h"
#include "shared/timeutils/timeutils.h" #include "shared/timeutils/timeutils.h"
#include "supervisor/filesystem.h" #include "supervisor/filesystem.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if FF_MAX_SS == FF_MIN_SS #if FF_MAX_SS == FF_MIN_SS
#define SECSIZE(fs) (FF_MIN_SS) #define SECSIZE(fs) (FF_MIN_SS)
@ -30,6 +30,11 @@
#define mp_obj_fat_vfs_t fs_user_mount_t #define mp_obj_fat_vfs_t fs_user_mount_t
// Factoring this common call saves about 90 bytes.
STATIC NORETURN void mp_raise_OSError_fresult(FRESULT res) {
mp_raise_OSError(fresult_to_errno_table[res]);
}
STATIC mp_import_stat_t fat_vfs_import_stat(void *vfs_in, const char *path) { STATIC mp_import_stat_t fat_vfs_import_stat(void *vfs_in, const char *path) {
fs_user_mount_t *vfs = vfs_in; fs_user_mount_t *vfs = vfs_in;
FILINFO fno; FILINFO fno;
@ -64,7 +69,7 @@ STATIC mp_obj_t fat_vfs_make_new(const mp_obj_type_t *type, size_t n_args, size_
// don't error out if no filesystem, to let mkfs()/mount() create one if wanted // don't error out if no filesystem, to let mkfs()/mount() create one if wanted
vfs->blockdev.flags |= MP_BLOCKDEV_FLAG_NO_FILESYSTEM; vfs->blockdev.flags |= MP_BLOCKDEV_FLAG_NO_FILESYSTEM;
} else if (res != FR_OK) { } else if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return MP_OBJ_FROM_PTR(vfs); return MP_OBJ_FROM_PTR(vfs);
@ -97,7 +102,7 @@ STATIC mp_obj_t fat_vfs_mkfs(mp_obj_t bdev_in) {
res = f_mkfs(&vfs->fatfs, FM_FAT32, 0, working_buf, sizeof(working_buf)); res = f_mkfs(&vfs->fatfs, FM_FAT32, 0, working_buf, sizeof(working_buf));
} }
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_const_none; return mp_const_none;
@ -172,7 +177,7 @@ STATIC mp_obj_t fat_vfs_ilistdir_func(size_t n_args, const mp_obj_t *args) {
iter->is_str = is_str_type; iter->is_str = is_str_type;
FRESULT res = f_opendir(&self->fatfs, &iter->dir, path); FRESULT res = f_opendir(&self->fatfs, &iter->dir, path);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return MP_OBJ_FROM_PTR(iter); return MP_OBJ_FROM_PTR(iter);
@ -188,7 +193,7 @@ STATIC mp_obj_t fat_vfs_remove_internal(mp_obj_t vfs_in, mp_obj_t path_in, mp_in
FRESULT res = f_stat(&self->fatfs, path, &fno); FRESULT res = f_stat(&self->fatfs, path, &fno);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
// check if path is a file or directory // check if path is a file or directory
@ -196,7 +201,7 @@ STATIC mp_obj_t fat_vfs_remove_internal(mp_obj_t vfs_in, mp_obj_t path_in, mp_in
res = f_unlink(&self->fatfs, path); res = f_unlink(&self->fatfs, path);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_const_none; return mp_const_none;
} else { } else {
@ -220,22 +225,7 @@ STATIC mp_obj_t fat_vfs_rename(mp_obj_t vfs_in, mp_obj_t path_in, mp_obj_t path_
const char *old_path = mp_obj_str_get_str(path_in); const char *old_path = mp_obj_str_get_str(path_in);
const char *new_path = mp_obj_str_get_str(path_out); const char *new_path = mp_obj_str_get_str(path_out);
// Check to see if we're moving a directory into itself. This occurs when we're moving a FRESULT res = f_rename(&self->fatfs, old_path, new_path);
// directory where the old path is a prefix of the new and the next character is a "/" and thus
// preserves the original directory name.
FILINFO fno;
FRESULT res = f_stat(&self->fatfs, old_path, &fno);
if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]);
}
if ((fno.fattrib & AM_DIR) != 0 &&
strlen(new_path) > strlen(old_path) &&
new_path[strlen(old_path)] == '/' &&
strncmp(old_path, new_path, strlen(old_path)) == 0) {
mp_raise_OSError(MP_EINVAL);
}
res = f_rename(&self->fatfs, old_path, new_path);
if (res == FR_EXIST) { if (res == FR_EXIST) {
// if new_path exists then try removing it (but only if it's a file) // if new_path exists then try removing it (but only if it's a file)
fat_vfs_remove_internal(vfs_in, path_out, 0); // 0 == file attribute fat_vfs_remove_internal(vfs_in, path_out, 0); // 0 == file attribute
@ -245,7 +235,7 @@ STATIC mp_obj_t fat_vfs_rename(mp_obj_t vfs_in, mp_obj_t path_in, mp_obj_t path_
if (res == FR_OK) { if (res == FR_OK) {
return mp_const_none; return mp_const_none;
} else { } else {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
} }
@ -259,7 +249,7 @@ STATIC mp_obj_t fat_vfs_mkdir(mp_obj_t vfs_in, mp_obj_t path_o) {
if (res == FR_OK) { if (res == FR_OK) {
return mp_const_none; return mp_const_none;
} else { } else {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
} }
STATIC MP_DEFINE_CONST_FUN_OBJ_2(fat_vfs_mkdir_obj, fat_vfs_mkdir); STATIC MP_DEFINE_CONST_FUN_OBJ_2(fat_vfs_mkdir_obj, fat_vfs_mkdir);
@ -273,7 +263,7 @@ STATIC mp_obj_t fat_vfs_chdir(mp_obj_t vfs_in, mp_obj_t path_in) {
FRESULT res = f_chdir(&self->fatfs, path); FRESULT res = f_chdir(&self->fatfs, path);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_const_none; return mp_const_none;
@ -286,7 +276,7 @@ STATIC mp_obj_t fat_vfs_getcwd(mp_obj_t vfs_in) {
char buf[MICROPY_ALLOC_PATH_MAX + 1]; char buf[MICROPY_ALLOC_PATH_MAX + 1];
FRESULT res = f_getcwd(&self->fatfs, buf, sizeof(buf)); FRESULT res = f_getcwd(&self->fatfs, buf, sizeof(buf));
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_obj_new_str(buf, strlen(buf)); return mp_obj_new_str(buf, strlen(buf));
} }
@ -307,7 +297,7 @@ STATIC mp_obj_t fat_vfs_stat(mp_obj_t vfs_in, mp_obj_t path_in) {
} else { } else {
FRESULT res = f_stat(&self->fatfs, path, &fno); FRESULT res = f_stat(&self->fatfs, path, &fno);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
} }
@ -357,7 +347,7 @@ STATIC mp_obj_t fat_vfs_statvfs(mp_obj_t vfs_in, mp_obj_t path_in) {
FATFS *fatfs = &self->fatfs; FATFS *fatfs = &self->fatfs;
FRESULT res = f_getfree(fatfs, &nclst); FRESULT res = f_getfree(fatfs, &nclst);
if (FR_OK != res) { if (FR_OK != res) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
mp_obj_tuple_t *t = MP_OBJ_TO_PTR(mp_obj_new_tuple(10, NULL)); mp_obj_tuple_t *t = MP_OBJ_TO_PTR(mp_obj_new_tuple(10, NULL));
@ -395,7 +385,7 @@ STATIC mp_obj_t vfs_fat_mount(mp_obj_t self_in, mp_obj_t readonly, mp_obj_t mkfs
res = f_mkfs(&self->fatfs, FM_FAT | FM_SFD, 0, working_buf, sizeof(working_buf)); res = f_mkfs(&self->fatfs, FM_FAT | FM_SFD, 0, working_buf, sizeof(working_buf));
} }
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
self->blockdev.flags &= ~MP_BLOCKDEV_FLAG_NO_FILESYSTEM; self->blockdev.flags &= ~MP_BLOCKDEV_FLAG_NO_FILESYSTEM;
@ -410,13 +400,54 @@ STATIC mp_obj_t vfs_fat_umount(mp_obj_t self_in) {
} }
STATIC MP_DEFINE_CONST_FUN_OBJ_1(fat_vfs_umount_obj, vfs_fat_umount); STATIC MP_DEFINE_CONST_FUN_OBJ_1(fat_vfs_umount_obj, vfs_fat_umount);
STATIC mp_obj_t vfs_fat_utime(mp_obj_t vfs_in, mp_obj_t path_in, mp_obj_t times_in) {
mp_obj_fat_vfs_t *self = MP_OBJ_TO_PTR(vfs_in);
const char *path = mp_obj_str_get_str(path_in);
if (!mp_obj_is_tuple_compatible(times_in)) {
mp_raise_type_arg(&mp_type_TypeError, times_in);
}
mp_obj_t *otimes;
mp_obj_get_array_fixed_n(times_in, 2, &otimes);
// Validate that both elements of the tuple are int and discard the second one
int time[2];
time[0] = mp_obj_get_int(otimes[0]);
time[1] = mp_obj_get_int(otimes[1]);
timeutils_struct_time_t tm;
timeutils_seconds_since_epoch_to_struct_time(time[0], &tm);
FILINFO fno;
fno.fdate = (WORD)(((tm.tm_year - 1980) * 512U) | tm.tm_mon * 32U | tm.tm_mday);
fno.ftime = (WORD)(tm.tm_hour * 2048U | tm.tm_min * 32U | tm.tm_sec / 2U);
FRESULT res = f_utime(&self->fatfs, path, &fno);
if (res != FR_OK) {
mp_raise_OSError_fresult(res);
}
return mp_const_none;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_3(fat_vfs_utime_obj, vfs_fat_utime);
STATIC mp_obj_t vfs_fat_getreadonly(mp_obj_t self_in) {
fs_user_mount_t *self = MP_OBJ_TO_PTR(self_in);
return mp_obj_new_bool(!filesystem_is_writable_by_python(self));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_1(fat_vfs_getreadonly_obj, vfs_fat_getreadonly);
STATIC const mp_obj_property_t fat_vfs_readonly_obj = {
.base.type = &mp_type_property,
.proxy = {(mp_obj_t)&fat_vfs_getreadonly_obj,
MP_ROM_NONE,
MP_ROM_NONE},
};
#if MICROPY_FATFS_USE_LABEL #if MICROPY_FATFS_USE_LABEL
STATIC mp_obj_t vfs_fat_getlabel(mp_obj_t self_in) { STATIC mp_obj_t vfs_fat_getlabel(mp_obj_t self_in) {
fs_user_mount_t *self = MP_OBJ_TO_PTR(self_in); fs_user_mount_t *self = MP_OBJ_TO_PTR(self_in);
char working_buf[12]; char working_buf[12];
FRESULT res = f_getlabel(&self->fatfs, working_buf, NULL); FRESULT res = f_getlabel(&self->fatfs, working_buf, NULL);
if (res != FR_OK) { if (res != FR_OK) {
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_obj_new_str(working_buf, strlen(working_buf)); return mp_obj_new_str(working_buf, strlen(working_buf));
} }
@ -431,7 +462,7 @@ STATIC mp_obj_t vfs_fat_setlabel(mp_obj_t self_in, mp_obj_t label_in) {
if (res == FR_WRITE_PROTECTED) { if (res == FR_WRITE_PROTECTED) {
mp_raise_msg(&mp_type_OSError, MP_ERROR_TEXT("Read-only filesystem")); mp_raise_msg(&mp_type_OSError, MP_ERROR_TEXT("Read-only filesystem"));
} }
mp_raise_OSError(fresult_to_errno_table[res]); mp_raise_OSError_fresult(res);
} }
return mp_const_none; return mp_const_none;
} }
@ -440,7 +471,7 @@ STATIC const mp_obj_property_t fat_vfs_label_obj = {
.base.type = &mp_type_property, .base.type = &mp_type_property,
.proxy = {(mp_obj_t)&fat_vfs_getlabel_obj, .proxy = {(mp_obj_t)&fat_vfs_getlabel_obj,
(mp_obj_t)&fat_vfs_setlabel_obj, (mp_obj_t)&fat_vfs_setlabel_obj,
(mp_obj_t)MP_ROM_NONE}, MP_ROM_NONE},
}; };
#endif #endif
@ -461,6 +492,8 @@ STATIC const mp_rom_map_elem_t fat_vfs_locals_dict_table[] = {
{ MP_ROM_QSTR(MP_QSTR_statvfs), MP_ROM_PTR(&fat_vfs_statvfs_obj) }, { MP_ROM_QSTR(MP_QSTR_statvfs), MP_ROM_PTR(&fat_vfs_statvfs_obj) },
{ MP_ROM_QSTR(MP_QSTR_mount), MP_ROM_PTR(&vfs_fat_mount_obj) }, { MP_ROM_QSTR(MP_QSTR_mount), MP_ROM_PTR(&vfs_fat_mount_obj) },
{ MP_ROM_QSTR(MP_QSTR_umount), MP_ROM_PTR(&fat_vfs_umount_obj) }, { MP_ROM_QSTR(MP_QSTR_umount), MP_ROM_PTR(&fat_vfs_umount_obj) },
{ MP_ROM_QSTR(MP_QSTR_utime), MP_ROM_PTR(&fat_vfs_utime_obj) },
{ MP_ROM_QSTR(MP_QSTR_readonly), MP_ROM_PTR(&fat_vfs_readonly_obj) },
#if MICROPY_FATFS_USE_LABEL #if MICROPY_FATFS_USE_LABEL
{ MP_ROM_QSTR(MP_QSTR_label), MP_ROM_PTR(&fat_vfs_label_obj) }, { MP_ROM_QSTR(MP_QSTR_label), MP_ROM_PTR(&fat_vfs_label_obj) },
#endif #endif

View File

@ -182,7 +182,7 @@ STATIC mp_obj_t file_open(fs_user_mount_t *vfs, const mp_obj_type_t *type, mp_ar
} }
if (rwxa_count != 1 || plus_count > 1 || bt_count > 1 || bad_mode) { if (rwxa_count != 1 || plus_count > 1 || bt_count > 1 || bad_mode) {
mp_raise_ValueError(translate("Invalid mode")); mp_arg_error_invalid(MP_QSTR_mode);
} }
assert(vfs != NULL); assert(vfs != NULL);

View File

@ -8,7 +8,7 @@
#include "py/runtime.h" #include "py/runtime.h"
#include "py/stream.h" #include "py/stream.h"
#include "extmod/vfs_posix.h" #include "extmod/vfs_posix.h"
#include "supervisor/shared/translate.h" #include "supervisor/shared/translate/translate.h"
#if (defined(MICROPY_VFS_POSIX) && MICROPY_VFS_POSIX) || (defined(MICROPY_VFS_POSIX_FILE) && MICROPY_VFS_POSIX_FILE) #if (defined(MICROPY_VFS_POSIX) && MICROPY_VFS_POSIX) || (defined(MICROPY_VFS_POSIX_FILE) && MICROPY_VFS_POSIX_FILE)

@ -1 +1 @@
Subproject commit baab505fd4dcc54d8e9d45e6463c68bdc6d100eb Subproject commit 9ddd59650598b7a0641d70aabcc8aab71799cb93

@ -1 +1 @@
Subproject commit beec03065712cd62f79e839d5cf8f7c9847fc3b1 Subproject commit e07e1853d7e995b9797a064c098bccc5c384632e

@ -1 +1 @@
Subproject commit 859a7d403e4e79ec1c8915c81ba581dbaab8a4ac Subproject commit b06b47037aed97475b1676b104d1f4b05c3f5e86

@ -1 +1 @@
Subproject commit a8abc3aa8dece6c4d0152b001dfca7d2c279f899 Subproject commit 9ace770b048be9ab0da4a154af279dbb643bbdb0

@ -1 +1 @@
Subproject commit b04042addd47c2645e139032b02a3b9ddeeb3425 Subproject commit 47f848f13f75d2f62d16407edaaf6dd0ec1fc3cc

@ -1 +1 @@
Subproject commit 938f6bb335ba5e4c56a8062c591ff9f3c18c4297 Subproject commit a37c7cc83685f2ff84a171a519207567a75d0947

@ -1 +1 @@
Subproject commit 8e7e111a9ff39d3f4311caa7babeb451422c759f Subproject commit ab0ffa938dfa7eb1fd7260353a7a4e28f55e537a

@ -1 +1 @@
Subproject commit df2449815433e05ea0f89c19518ccde7a10a2faa Subproject commit e6a9a0140ed44ef5f15d8040fce35b5319c1f216

@ -1 +1 @@
Subproject commit 708bb0c82c7b075bd6912c97231aea880b1a1cb8 Subproject commit cf2b173d0fc3ac2cd961754c6adf8f614a1c7c39

@ -1 +1 @@
Subproject commit 0bd04a235556979bd13a373821a6602445fe132b Subproject commit 911201504a269dbfc49b04ca59bc54adabd4716a

@ -1 +1 @@
Subproject commit eb6124fdff59b98d7d49dd86072df99c0e97167b Subproject commit 187279a95e5cdd634d233af59352558cea4c1227

@ -1 +1 @@
Subproject commit 13775b058422085762874fde8e587f2e9f066855 Subproject commit ee6bfcf9e676eb435c8890db37f07719984a60a1

@ -1 +1 @@
Subproject commit f6cdec74b64112016c459abe4a5d31a3b34caeb3 Subproject commit 8eedf860beca0d32219189b72ea6fc8eea7e66db

@ -1 +1 @@
Subproject commit bccbe3da75f42b540b3faebb9d5a2d1ccf5e7147 Subproject commit 3d7d404a1cafc02f6c3391b100157490132e5c5f

@ -1 +1 @@
Subproject commit 2fddabcaf0df1763111ed9dbf9e2d4cdb5b0434e Subproject commit 93c7e0ed55e7ed011908ac9a1c0f8228f0f4323b

@ -1 +1 @@
Subproject commit 9771c9369c7e251f514eb26abcfcea1e891e6f27 Subproject commit 340c62ef6ce867b3924d166afc3d2a171680f799

@ -0,0 +1 @@
Subproject commit 5433ba3760ca605267223de883a44cb8394f40a5

@ -1 +1 @@
Subproject commit 29816fbe98c012ea0a1b5cae7f07aeae7ebf8b52 Subproject commit 38bd02f014403954ab52154e3877e502d83862dc

Some files were not shown because too many files have changed in this diff.