Compare commits
No commits in common. "main" and "7.2.x" have entirely different histories.
@ -1,7 +0,0 @@
#define MICROPY_HW_BOARD_NAME "BLOK"
USB_PRODUCT = "BLOK"
uint32_t THI = (*(uint32_t *)FUSES_HOT_TEMP_VAL_INT_ADDR & FUSES_HOT_TEMP_VAL_INT_Msk) >> FUSES_HOT_TEMP_VAL_INT_Pos;
float TH = THI + convert_dec_to_frac(THD);
print(binascii.b2a_base64(b"fo"))
# again, neither will "there" or "wither", since they have "the"
i1Qb$TE"rl
@ -1,28 +0,0 @@
ans
ure
clen
ser
endianess
pris
synopsys
reenable
dout
inout
wel
iput
hsi
astroid
busses
cyphertext
dum
deque
deques
extint
shs
pass-thru
numer
arithmetics
ftbfs
straightaway
ftbs
ftb
10  .codespellrc
@ -1,10 +0,0 @@
# See: https://github.com/codespell-project/codespell#using-a-config-file
[codespell]
# In the event of a false positive, add the problematic word, in all lowercase, to 'ignore-words.txt' (one word per line).
# Or copy & paste the whole problematic line to 'exclude-file.txt'
ignore-words = .codespell/ignore-words.txt
exclude-file = .codespell/exclude-file.txt
check-filenames =
check-hidden =
count =
skip = .cproject,.git,./lib,./locale,ACKNOWLEDGEMENTS
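The two word lists above are the files this [codespell] section points at. As a usage sketch (assuming the codespell tool itself is installed, e.g. via pip — it is not part of the diff above), running it from the repository root picks this configuration up automatically:

    pip install codespell    # assumption: codespell is obtained separately, not pinned by the repo
    codespell                # reads .codespellrc plus the .codespell/ ignore and exclude files above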
@ -1,31 +0,0 @@
Build CircuitPython in a Github-Devcontainer
============================================

To build CircuitPython within a Github-Devcontainer, you need to perform
the following steps.

1. checkout the code to a devcontainer

   - click on the green "<> Code"-button
   - select the Codespaces-tab
   - choose "+ new with options..." from the "..."-menu
   - in the following screen select the branch and then
   - select ".devcontainer/cortex-m/devcontainer.json" instead
     of "Default Codespaces configuration"
   - update region as necessary
   - finally, click on the green "Create codespace" button

2. Your codespace is created. Cloning the images is quite fast, but
   preparing it for CircuitPython-development takes about 10 minutes.
   Note that this is a one-time task.

3. During creation, you can run the command
   `tail -f /workspaces/.codespaces/.persistedshare/creation.log`
   to see what is going on.

4. To actually build CircuitPython, run

       cd ports/raspberrypi
       make -j $(nproc) BOARD=whatever TRANSLATION=xx_XX

   This takes about 2m40s.
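Taken together with the commented build example at the end of on-create.sh further down, the in-codespace flow boils down to the sketch below; the board and translation names are just the placeholders used in those files, not the only supported values:

    # inside the codespace, after on-create.sh has finished and the ARM toolchain is on PATH
    cd /workspaces/circuitpython/ports/raspberrypi
    make -j $(nproc) BOARD=pimoroni_tufty2040 TRANSLATION=de_DE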
@ -1,23 +0,0 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/universal
{
    "name": "CircuitPython Cortex-M Build-Environment (base: Default Linux Universal)",
    "image": "mcr.microsoft.com/devcontainers/universal:2-linux",
    "postCreateCommand": ".devcontainer/cortex-m/on-create.sh",
    "remoteEnv": { "PATH": "/workspaces/gcc-arm-none-eabi/bin:${containerEnv:PATH}" }

    // Features to add to the dev container. More info: https://containers.dev/features.
    // "features": {},

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Use 'postCreateCommand' to run commands after the container is created.
    // "postCreateCommand": "uname -a",

    // Configure tool-specific properties.
    // "customizations": {},

    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "root"
}
@ -1,59 +0,0 @@
#!/bin/bash
# -----------------------------------------------------------------------------
# on-create.sh: postCreateCommand-hook for devcontainer.json (Cortex-M build)
#
# Author: Bernhard Bablok
#
# -----------------------------------------------------------------------------

echo -e "[on-create.sh] downloading and installing gcc-arm-non-eabi toolchain"
cd /workspaces
wget -qO gcc-arm-none-eabi.tar.bz2 https://adafru.it/Pid
tar -xjf gcc-arm-none-eabi.tar.bz2
ln -s gcc-arm-none-eabi-10-2020-q4-major gcc-arm-none-eabi
rm -f /workspaces/gcc-arm-none-eabi.tar.bz2
export PATH=/workspaces/gcc-arm-none-eabi/bin:$PATH

# add repository and install tools
echo -e "[on-create.sh] adding pybricks/ppa"
sudo add-apt-repository -y ppa:pybricks/ppa
echo -e "[on-create.sh] installing uncrustify and mtools"
sudo apt-get -y install uncrustify mtools

# dosfstools >= 4.2 needed, standard repo only has 4.1
echo -e "[on-create.sh] downloading and installing dosfstools"
wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz
tar -xzf dosfstools-4.2.tar.gz
cd dosfstools-4.2/
./configure
make -j $(nproc)
sudo make install
cd /workspaces
rm -fr /workspaces/dosfstools-4.2 /workspaces/dosfstools-4.2.tar.gz

# prepare source-code tree
cd /workspaces/circuitpython/
echo -e "[on-create.sh] fetching submodules"
make fetch-all-submodules
echo -e "[on-create.sh] fetching tags"
git fetch --tags --recurse-submodules=no --shallow-since="2021-07-01" https://github.com/adafruit/circuitpython HEAD

# additional python requirements
echo -e "[on-create.sh] pip-installing requirements"
pip install --upgrade -r requirements-dev.txt
pip install --upgrade -r requirements-doc.txt

# add pre-commit
echo -e "[on-create.sh] installing pre-commit"
pre-commit install

# create cross-compiler
echo -e "[on-create.sh] building mpy-cross"
make -j $(nproc) -C mpy-cross # time: about 36 sec

# that's it!
echo -e "[on-create.sh] setup complete"

#commands to actually build CP:
#cd ports/raspberrypi
#time make -j $(nproc) BOARD=pimoroni_tufty2040 TRANSLATION=de_DE
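The devcontainer.json above runs this script once as its postCreateCommand. If the hook did not complete and the setup has to be repeated by hand, a sketch of the manual invocation (note the script re-downloads the toolchain, so re-running is not strictly idempotent):

    cd /workspaces/circuitpython
    bash .devcontainer/cortex-m/on-create.sh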
@ -1,24 +1,3 @@
-# all: Fix various spelling mistakes found by codespell 2.2.6.
-cf490a70917a1b2d38ba9b58e763e0837d0f7ca7
-
-# all: Fix spelling mistakes based on codespell check.
-b1229efbd1509654dec6053865ab828d769e29db
-
-# top: Update Python formatting to black "2023 stable style".
-8b2748269244304854b3462cb8902952b4dcb892
-
-# all: Reformat remaining C code that doesn't have a space after a comma.
-5b700b0af90591d6b1a2c087bb8de6b7f1bfdd2d
-
-# ports: Reformat more C and Python source code.
-5c32111fa0e31e451b0f1666bdf926be2fdfd82c
-
-# all: Update Python formatting to latest Black version 22.1.0.
-ab2923dfa1174dc177f0a90cb00a7e4ff87958d2
-
-# all: Update Python formatting to latest Black version 21.12b0.
-3770fab33449a5dadf8eb06edfae0767e75320a6
-
 # tools/gen-cpydiff.py: Fix formatting of doc strings for new Black.
 0f78c36c5aa458a954eed39a46942209107a553e
 
2  .github/ISSUE_TEMPLATE/bug_report.yml  vendored
@ -6,7 +6,7 @@ body:
   - type: markdown
     attributes:
       value: >-
-        Thanks for testing out CircuitPython! Now that you have encountered a
+        Thanks! for testing out CircuitPython. Now that you have encountered a
         bug... you can file a report for it.
   - type: textarea
     id: firmware
59  .github/actions/deps/external/action.yml  vendored
@ -1,59 +0,0 @@
name: Fetch external deps

inputs:
  action:
    required: false
    default: restore
    type: choice
    options:
    - cache
    - restore

  port:
    required: false
    default: none
    type: string

runs:
  using: composite
  steps:
    # arm
    - name: Get arm toolchain
      if: >-
        inputs.port != 'none' &&
        inputs.port != 'litex' &&
        inputs.port != 'espressif'
      uses: carlosperate/arm-none-eabi-gcc-action@v1
      with:
        release: '13.2.Rel1'

    # espressif
    - name: Get espressif toolchain
      if: inputs.port == 'espressif'
      run: |
        sudo apt-get update
        sudo apt-get install -y ninja-build
      shell: bash
    - name: Install IDF tools
      if: inputs.port == 'espressif'
      run: |
        $IDF_PATH/install.sh
        rm -rf $IDF_TOOLS_PATH/dist
      shell: bash
    - name: Set environment
      if: inputs.port == 'espressif'
      run: |
        source $IDF_PATH/export.sh
        echo >> $GITHUB_ENV "IDF_PYTHON_ENV_PATH=$IDF_PYTHON_ENV_PATH"
        echo >> $GITHUB_PATH "$PATH"
      shell: bash

    # common
    - name: Cache python dependencies
      if: inputs.port != 'espressif'
      uses: ./.github/actions/deps/python
      with:
        action: ${{ inputs.action }}
    - name: Install python dependencies
      run: pip install -r requirements-dev.txt
      shell: bash
36  .github/actions/deps/ports/action.yml  vendored
@ -1,36 +0,0 @@
name: Fetch port deps

inputs:
  board:
    required: true
    type: string

outputs:
  port:
    value: ${{ steps.board-to-port.outputs.port }}

runs:
  using: composite
  steps:
    - name: Board to port
      id: board-to-port
      run: |
        PORT=$(find ports/*/boards/ -type d -name ${{ inputs.board }} | sed 's/^ports\///g;s/\/boards.*//g')
        if [ -z $PORT ]; then (exit 1); else echo >> $GITHUB_OUTPUT "port=$PORT"; fi
      shell: bash

    - name: Set up broadcom
      if: steps.board-to-port.outputs.port == 'broadcom'
      uses: ./.github/actions/deps/ports/broadcom

    - name: Set up espressif
      if: steps.board-to-port.outputs.port == 'espressif'
      uses: ./.github/actions/deps/ports/espressif

    - name: Set up litex
      if: steps.board-to-port.outputs.port == 'litex'
      uses: ./.github/actions/deps/ports/litex

    - name: Set up nrf
      if: steps.board-to-port.outputs.port == 'nrf'
      uses: ./.github/actions/deps/ports/nrf
22  .github/actions/deps/ports/broadcom/action.yml  vendored
@ -1,22 +0,0 @@
name: Fetch broadcom port deps

runs:
  using: composite
  steps:
    - name: Get broadcom toolchain
      run: |
        wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
        sudo tar -C /usr --strip-components=1 -xaf gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
        sudo apt-get update
        sudo apt-get install -y mtools
      shell: bash
    - name: Install mkfs.fat
      run: |
        wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz
        tar -xaf dosfstools-4.2.tar.gz
        cd dosfstools-4.2
        ./configure
        make -j 2
        cd src
        echo >> $GITHUB_PATH $(pwd)
      shell: bash
36  .github/actions/deps/ports/espressif/action.yml  vendored
@ -1,36 +0,0 @@
name: Fetch espressif port deps

runs:
  using: composite
  steps:
    - name: Set IDF env
      run: |
        echo >> $GITHUB_ENV "IDF_PATH=$GITHUB_WORKSPACE/ports/espressif/esp-idf"
        echo >> $GITHUB_ENV "IDF_TOOLS_PATH=$GITHUB_WORKSPACE/.idf_tools"
      shell: bash

    - name: Get IDF commit
      id: idf-commit
      run: |
        COMMIT=$(git submodule status ports/espressif/esp-idf | grep -o -P '(?<=^-).*(?= )')
        echo "$COMMIT"
        echo "commit=$COMMIT" >> $GITHUB_OUTPUT
      shell: bash

    - name: Cache IDF submodules
      uses: actions/cache@v3
      with:
        path: |
          .git/modules/ports/espressif/esp-idf
          ports/espressif/esp-idf
        key: submodules-idf-${{ steps.idf-commit.outputs.commit }}

    - name: Cache IDF tools
      uses: actions/cache@v3
      with:
        path: ${{ env.IDF_TOOLS_PATH }}
        key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-idf-${{ steps.idf-commit.outputs.commit }}

    - name: Initialize IDF submodules
      run: git submodule update --init --depth=1 --recursive $IDF_PATH
      shell: bash
10  .github/actions/deps/ports/litex/action.yml  vendored
@ -1,10 +0,0 @@
name: Fetch litex port deps

runs:
  using: composite
  steps:
    - name: Get litex toolchain
      run: |
        wget https://static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
        sudo tar -C /usr --strip-components=1 -xaf riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
      shell: bash
17  .github/actions/deps/ports/nrf/action.yml  vendored
@ -1,17 +0,0 @@
name: Fetch nrf port deps

runs:
  using: composite
  steps:
    - name: Get nrfutil 7+
      run: |
        wget https://developer.nordicsemi.com/.pc-tools/nrfutil/x64-linux/nrfutil
        chmod +x nrfutil
        ./nrfutil install nrf5sdk-tools
        mkdir -p $HOME/.local/bin
        mv nrfutil $HOME/.local/bin
        echo "$HOME/.local/bin" >> $GITHUB_PATH
      shell: bash
    - name: Print nrfutil version
      run: nrfutil -V
      shell: bash
42  .github/actions/deps/python/action.yml  vendored
@ -1,42 +0,0 @@
name: Fetch python deps

inputs:
  action:
    description: The cache action to use
    required: false
    default: restore
    type: choice
    options:
    - cache
    - restore

runs:
  using: composite
  steps:
    - name: Cache python dependencies
      id: cache-python-deps
      if: inputs.action == 'cache'
      uses: actions/cache@v3
      with:
        path: .cp_tools
        key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-cp-${{ hashFiles('requirements-dev.txt') }}

    - name: Restore python dependencies
      id: restore-python-deps
      if: inputs.action == 'restore'
      uses: actions/cache/restore@v3
      with:
        path: .cp_tools
        key: ${{ runner.os }}-${{ env.pythonLocation }}-tools-cp-${{ hashFiles('requirements-dev.txt') }}

    - name: Set up venv
      if: inputs.action == 'cache' && !steps.cache-python-deps.outputs.cache-hit
      run: python -m venv .cp_tools
      shell: bash

    - name: Activate venv
      if: inputs.action == 'cache' || (inputs.action == 'restore' && steps.restore-python-deps.outputs.cache-hit)
      run: |
        source .cp_tools/bin/activate
        echo >> $GITHUB_PATH "$PATH"
      shell: bash
87  .github/actions/deps/submodules/action.yml  vendored
@ -1,87 +0,0 @@
name: 'Fetch Submodules'

inputs:
  target:
    description: 'The target for ci_fetch_deps'
    required: false
    type: string

  submodules:
    description: 'The submodules to cache'
    required: false
    default: '["extmod/ulab", "lib/", "tools/"]'
    type: string

  action:
    description: 'The cache action to use'
    required: false
    default: 'restore'
    type: choice
    options:
    - cache
    - restore

  version:
    description: 'Whether to generate CP version'
    required: false
    default: false
    type: boolean

outputs:
  frozen:
    description: 'Whether frozen submodules were fetched'
    value: ${{ steps.cp-deps.outputs.frozen_tags }}

  version:
    description: 'The CP version'
    value: ${{ steps.cp-version.outputs.cp-version }}

runs:
  using: "composite"
  steps:
    - name: Create submodule status
      id: create-submodule-status
      run: |
        git submodule status ${{ join(fromJSON(inputs.submodules), ' ') }} >> submodule_status
        echo $(cut -d ' ' -f 2 submodule_status) | echo "submodules=[\"$(sed "s/ /\", \"/g")\"]" >> $GITHUB_OUTPUT
      shell: bash

    - name: Cache submodules
      if: ${{ inputs.action == 'cache' }}
      uses: actions/cache@v3
      with:
        path: ".git/modules/\n${{ join(fromJSON(steps.create-submodule-status.outputs.submodules), '\n') }}"
        key: submodules-common-${{ hashFiles('submodule_status') }}
        enableCrossOsArchive: true

    - name: Restore submodules
      if: ${{ inputs.action == 'restore' }}
      uses: actions/cache/restore@v3
      with:
        path: ".git/modules/\n${{ join(fromJSON(steps.create-submodule-status.outputs.submodules), '\n') }}"
        key: submodules-common-${{ hashFiles('submodule_status') }}
        enableCrossOsArchive: true

    - name: Remove submodule status
      run: rm submodule_status
      shell: bash

    - name: CircuitPython dependencies
      id: cp-deps
      run: python tools/ci_fetch_deps.py ${{ inputs.target || matrix.board || github.job }}
      shell: bash

    - name: CircuitPython version
      id: cp-version
      if: ${{ inputs.version == 'true' }}
      run: |
        echo "::group::Fetch history and tags"
        git fetch --no-recurse-submodules --shallow-since="2021-07-01" --tags https://github.com/adafruit/circuitpython HEAD
        git fetch --no-recurse-submodules --shallow-since="2021-07-01" origin $GITHUB_SHA
        git repack -d
        echo "::endgroup::"
        CP_VERSION=$(tools/describe)
        echo "$CP_VERSION"
        echo "CP_VERSION=$CP_VERSION" >> $GITHUB_ENV
        echo "cp-version=$CP_VERSION" >> $GITHUB_OUTPUT
      shell: bash
42  .github/actions/mpy_cross/action.yml  vendored
@ -1,42 +0,0 @@
name: Set up mpy-cross

inputs:
  download:
    required: false
    default: true
    type: boolean
  cp-version:
    required: true
    type: string

runs:
  using: composite
  steps:
    - name: Download mpy-cross
      id: download-mpy-cross
      if: inputs.download == 'true'
      continue-on-error: true
      uses: actions/download-artifact@v3
      with:
        name: mpy-cross
        path: mpy-cross/build

    - name: Make mpy-cross executable
      if: inputs.download == 'true' && steps.download-mpy-cross.outcome == 'success'
      run: sudo chmod +x mpy-cross/build/mpy-cross
      shell: bash

    - name: Build mpy-cross
      if: inputs.download == 'false' || steps.download-mpy-cross.outcome == 'failure'
      run: make -C mpy-cross -j2
      shell: bash
      env:
        CP_VERSION: ${{ inputs.cp-version }}

    - name: Upload mpy-cross
      if: inputs.download == 'false' || steps.download-mpy-cross.outcome == 'failure'
      continue-on-error: true
      uses: actions/upload-artifact@v3
      with:
        name: mpy-cross
        path: mpy-cross/build/mpy-cross
33  .github/actions/upload_aws/action.yml  vendored
@ -1,33 +0,0 @@
name: Upload to AWS S3

inputs:
  source:
    required: true
    type: string

  destination:
    required: false
    type: string

  AWS_ACCESS_KEY_ID:
    required: true

  AWS_SECRET_ACCESS_KEY:
    required: true

runs:
  using: composite
  steps:
    - name: Upload to S3
      if: >-
        (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') ||
        (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
      run: >-
        [ -z "$AWS_ACCESS_KEY_ID" ] ||
        aws s3 cp ${{ inputs.source }} s3://adafruit-circuit-python/bin/${{ inputs.destination }}
        ${{ endsWith(inputs.source, '/') && '--recursive' || '' }} --no-progress --region us-east-1
      env:
        AWS_PAGER: ''
        AWS_ACCESS_KEY_ID: ${{ inputs.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ inputs.AWS_SECRET_ACCESS_KEY }}
      shell: bash
87  .github/workflows/build-boards.yml  vendored
@ -1,87 +0,0 @@
name: Build boards

on:
  workflow_call:
    inputs:
      boards:
        required: true
        type: string
      cp-version:
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        required: false
      AWS_SECRET_ACCESS_KEY:
        required: false

jobs:
  board:
    runs-on: ubuntu-22.04
    env:
      CP_VERSION: ${{ inputs.cp-version }}
    strategy:
      fail-fast: false
      matrix:
        board: ${{ fromJSON(inputs.boards) }}
    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          submodules: false
          fetch-depth: 1
      - name: Set up python
        uses: actions/setup-python@v4
        with:
          python-version: 3.x
      - name: Set up port
        id: set-up-port
        uses: ./.github/actions/deps/ports
        with:
          board: ${{ matrix.board }}
      - name: Set up submodules
        id: set-up-submodules
        uses: ./.github/actions/deps/submodules
      - name: Set up external
        uses: ./.github/actions/deps/external
        with:
          port: ${{ steps.set-up-port.outputs.port }}
      - name: Set up mpy-cross
        if: steps.set-up-submodules.outputs.frozen == 'True'
        uses: ./.github/actions/mpy_cross
        with:
          cp-version: ${{ inputs.cp-version }}

      - name: Versions
        run: |
          gcc --version
          python3 --version
          cmake --version || true
          ninja --version || true
          aarch64-none-elf-gcc --version || true
          arm-none-eabi-gcc --version || true
          xtensa-esp32-elf-gcc --version || true
          riscv32-esp-elf-gcc --version || true
          riscv64-unknown-elf-gcc --version || true
          mkfs.fat --version || true

      - name: Set up build failure matcher
        run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
      - name: Build board
        run: python3 -u build_release_files.py
        working-directory: tools
        env:
          BOARDS: ${{ matrix.board }}
          PULL: ${{ github.event.number }}

      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: ${{ matrix.board }}
          path: bin/${{ matrix.board }}
      - name: Upload to S3
        uses: ./.github/actions/upload_aws
        with:
          source: bin/
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
74  .github/workflows/build-mpy-cross.yml  vendored
@ -1,74 +0,0 @@
name: Build mpy-cross

on:
  workflow_call:
    inputs:
      cp-version:
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        required: false
      AWS_SECRET_ACCESS_KEY:
        required: false

jobs:
  build:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        mpy-cross: ["static", "static-aarch64", "static-mingw", "static-raspbian"]
    env:
      CP_VERSION: ${{ inputs.cp-version }}
      EX_static-mingw: static.exe
      OS_static: linux-amd64
      OS_static-aarch64: linux-aarch64
      OS_static-mingw: windows
      OS_static-raspbian: linux-raspbian
    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          submodules: false
          fetch-depth: 1
      - name: Set up python
        uses: actions/setup-python@v4
        with:
          python-version: 3.x
      - name: Set up submodules
        uses: ./.github/actions/deps/submodules
        with:
          target: mpy-cross

      - name: Install toolchain (aarch64)
        if: matrix.mpy-cross == 'static-aarch64'
        run: |
          sudo apt-get update
          sudo apt-get install -y gcc-aarch64-linux-gnu
      - name: Install toolchain (mingw)
        if: matrix.mpy-cross == 'static-mingw'
        run: |
          sudo apt-get update
          sudo apt-get install -y mingw-w64

      - name: Build mpy-cross.${{ matrix.mpy-cross }}
        run: make -C mpy-cross -j2 -f Makefile.${{ matrix.mpy-cross }}

      - name: Set output
        run: |
          echo >> $GITHUB_ENV "EX=${{ env[format('EX_{0}', matrix.mpy-cross)] || matrix.mpy-cross }}"
          echo >> $GITHUB_ENV "OS=${{ env[format('OS_{0}', matrix.mpy-cross)] }}"

      - name: Upload artifact
        uses: actions/upload-artifact@v3
        with:
          name: mpy-cross.${{ env.EX }}
          path: mpy-cross/build-${{ matrix.mpy-cross }}/mpy-cross.${{ env.EX }}
      - name: Upload to S3
        uses: ./.github/actions/upload_aws
        with:
          source: mpy-cross/build-${{ matrix.mpy-cross }}/mpy-cross.${{ env.EX }}
          destination: mpy-cross/${{ env.OS }}/mpy-cross-${{ env.OS }}-${{ env.CP_VERSION }}.${{ env.EX }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
616  .github/workflows/build.yml  vendored
@ -9,115 +9,164 @@ on:
|
|||||||
pull_request:
|
pull_request:
|
||||||
release:
|
release:
|
||||||
types: [published]
|
types: [published]
|
||||||
|
check_suite:
|
||||||
|
types: [rerequested]
|
||||||
|
|
||||||
concurrency:
|
concurrency:
|
||||||
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
|
||||||
cancel-in-progress: true
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
scheduler:
|
test:
|
||||||
runs-on: ubuntu-22.04
|
runs-on: ubuntu-20.04
|
||||||
outputs:
|
outputs:
|
||||||
docs: ${{ steps.set-matrix.outputs.docs }}
|
build-doc: ${{ steps.set-matrix.outputs.build-doc }}
|
||||||
ports: ${{ steps.set-matrix.outputs.ports }}
|
boards-arm: ${{ steps.set-matrix.outputs.boards-arm }}
|
||||||
windows: ${{ steps.set-matrix.outputs.windows }}
|
boards-riscv: ${{ steps.set-matrix.outputs.boards-riscv }}
|
||||||
cp-version: ${{ steps.set-up-submodules.outputs.version }}
|
boards-espressif: ${{ steps.set-matrix.outputs.boards-espressif }}
|
||||||
|
boards-aarch: ${{ steps.set-matrix.outputs.boards-aarch }}
|
||||||
steps:
|
steps:
|
||||||
- name: Dump GitHub context
|
- name: Dump GitHub context
|
||||||
run: echo "$GITHUB_CONTEXT"
|
|
||||||
env:
|
env:
|
||||||
GITHUB_CONTEXT: ${{ toJson(github) }}
|
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||||
- name: Set up repository
|
run: echo "$GITHUB_CONTEXT"
|
||||||
uses: actions/checkout@v3
|
- uses: actions/checkout@v2.2.0
|
||||||
with:
|
with:
|
||||||
submodules: false
|
submodules: false
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
- name: Set up python
|
- name: Set up Python 3
|
||||||
uses: actions/setup-python@v4
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: 3.x
|
python-version: "3.x"
|
||||||
- name: Duplicate USB VID/PID check
|
- name: Get CP deps
|
||||||
|
run: python tools/ci_fetch_deps.py test ${{ github.sha }}
|
||||||
|
- name: CircuitPython version
|
||||||
|
run: |
|
||||||
|
tools/describe || git log --parents HEAD~4..
|
||||||
|
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y eatmydata
|
||||||
|
sudo eatmydata apt-get install -y gettext gcc-aarch64-linux-gnu mingw-w64
|
||||||
|
pip install -r requirements-ci.txt -r requirements-dev.txt
|
||||||
|
- name: Versions
|
||||||
|
run: |
|
||||||
|
gcc --version
|
||||||
|
python3 --version
|
||||||
|
- name: Duplicate USB VID/PID Check
|
||||||
run: python3 -u -m tools.ci_check_duplicate_usb_vid_pid
|
run: python3 -u -m tools.ci_check_duplicate_usb_vid_pid
|
||||||
- name: Set up submodules
|
- name: Build mpy-cross
|
||||||
id: set-up-submodules
|
run: make -C mpy-cross -j2
|
||||||
uses: ./.github/actions/deps/submodules
|
- name: Build unix port
|
||||||
|
run: |
|
||||||
|
make -C ports/unix VARIANT=coverage -j2
|
||||||
|
- name: Test all
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1
|
||||||
|
working-directory: tests
|
||||||
|
- name: Print failure info
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --print-failures
|
||||||
|
if: failure()
|
||||||
|
working-directory: tests
|
||||||
|
- name: Native Tests
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --emit native
|
||||||
|
working-directory: tests
|
||||||
|
- name: mpy Tests
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy -d basics float micropython
|
||||||
|
working-directory: tests
|
||||||
|
- name: Native mpy Tests
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy --emit native -d basics float micropython
|
||||||
|
working-directory: tests
|
||||||
|
- name: Build native modules
|
||||||
|
run: |
|
||||||
|
make -C examples/natmod/features1
|
||||||
|
make -C examples/natmod/features2
|
||||||
|
make -C examples/natmod/btree
|
||||||
|
make -C examples/natmod/framebuf
|
||||||
|
make -C examples/natmod/uheapq
|
||||||
|
make -C examples/natmod/urandom
|
||||||
|
make -C examples/natmod/ure
|
||||||
|
make -C examples/natmod/uzlib
|
||||||
|
- name: Test native modules
|
||||||
|
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-natmodtests.py extmod/{btree*,framebuf*,uheapq*,ure*,uzlib*}.py
|
||||||
|
working-directory: tests
|
||||||
|
- name: Build mpy-cross.static-aarch64
|
||||||
|
run: make -C mpy-cross -j2 -f Makefile.static-aarch64
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
action: cache
|
name: mpy-cross.static-aarch64
|
||||||
version: true
|
path: mpy-cross/mpy-cross.static-aarch64
|
||||||
- name: Set up external
|
- name: Build mpy-cross.static-raspbian
|
||||||
uses: ./.github/actions/deps/external
|
run: make -C mpy-cross -j2 -f Makefile.static-raspbian
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
action: cache
|
name: mpy-cross.static-raspbian
|
||||||
- name: Set up mpy-cross
|
path: mpy-cross/mpy-cross.static-raspbian
|
||||||
uses: ./.github/actions/mpy_cross
|
- name: Build mpy-cross.static
|
||||||
|
run: make -C mpy-cross -j2 -f Makefile.static
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
cp-version: ${{ steps.set-up-submodules.outputs.version }}
|
name: mpy-cross.static-amd64-linux
|
||||||
download: false
|
path: mpy-cross/mpy-cross.static
|
||||||
- name: Get last commit with checks
|
- name: Build mpy-cross.static-mingw
|
||||||
id: get-last-commit-with-checks
|
run: make -C mpy-cross -j2 -f Makefile.static-mingw
|
||||||
if: github.event_name == 'pull_request'
|
- uses: actions/upload-artifact@v2
|
||||||
working-directory: tools
|
with:
|
||||||
run: python3 -u ci_changes_per_commit.py
|
name: mpy-cross.static-x64-windows
|
||||||
|
path: mpy-cross/mpy-cross.static.exe
|
||||||
|
- name: Upload mpy-cross builds to S3
|
||||||
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
|
||||||
env:
|
env:
|
||||||
REPO: ${{ github.repository }}
|
AWS_PAGER: ''
|
||||||
PULL: ${{ github.event.number }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
GITHUB_TOKEN: ${{ github.token }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
EXCLUDE_COMMIT: ${{ github.event.pull_request.head.sha }}
|
run: |
|
||||||
- name: Set head sha (pull)
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-aarch64 s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-aarch64-${{ env.CP_VERSION }} --no-progress --region us-east-1
|
||||||
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-raspbian s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-raspbian-${{ env.CP_VERSION }} --no-progress --region us-east-1
|
||||||
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-amd64-linux-${{ env.CP_VERSION }} --no-progress --region us-east-1
|
||||||
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static.exe s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-x64-windows-${{ env.CP_VERSION }}.exe --no-progress --region us-east-1
|
||||||
|
- name: "Get changes"
|
||||||
if: github.event_name == 'pull_request'
|
if: github.event_name == 'pull_request'
|
||||||
run: echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> $GITHUB_ENV
|
uses: dorny/paths-filter@v2
|
||||||
- name: Set base sha (pull)
|
id: filter
|
||||||
if: github.event_name == 'pull_request'
|
with:
|
||||||
run: git cat-file -e $SHA && echo "BASE_SHA=$SHA" >> $GITHUB_ENV || true
|
list-files: json
|
||||||
env:
|
filters: |
|
||||||
SHA: ${{ steps.get-last-commit-with-checks.outputs.commit_sha || github.event.pull_request.base.sha }}
|
changed:
|
||||||
- name: Set head sha (push)
|
- '**'
|
||||||
if: github.event_name == 'push'
|
- name: "Set matrix"
|
||||||
run: echo "HEAD_SHA=${{ github.event.after }}" >> $GITHUB_ENV
|
|
||||||
- name: Set base sha (push)
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
run: git cat-file -e $SHA && echo "BASE_SHA=$SHA" >> $GITHUB_ENV || true
|
|
||||||
env:
|
|
||||||
SHA: ${{ github.event.before }}
|
|
||||||
- name: Set matrix
|
|
||||||
id: set-matrix
|
id: set-matrix
|
||||||
run: python3 -u ci_set_matrix.py
|
|
||||||
working-directory: tools
|
working-directory: tools
|
||||||
env:
|
env:
|
||||||
LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.check_runs }}
|
CHANGED_FILES: ${{ steps.filter.outputs.changed_files }}
|
||||||
|
run: python3 -u ci_set_matrix.py
|
||||||
|
|
||||||
tests:
|
|
||||||
needs: scheduler
|
|
||||||
uses: ./.github/workflows/run-tests.yml
|
|
||||||
with:
|
|
||||||
cp-version: ${{ needs.scheduler.outputs.cp-version }}
|
|
||||||
|
|
||||||
mpy-cross:
|
|
||||||
needs: scheduler
|
|
||||||
if: needs.scheduler.outputs.ports != '{}'
|
|
||||||
uses: ./.github/workflows/build-mpy-cross.yml
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
cp-version: ${{ needs.scheduler.outputs.cp-version }}
|
|
||||||
|
|
||||||
mpy-cross-mac:
|
mpy-cross-mac:
|
||||||
runs-on: macos-11
|
runs-on: macos-10.15
|
||||||
needs: scheduler
|
|
||||||
if: needs.scheduler.outputs.ports != '{}'
|
|
||||||
env:
|
|
||||||
CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set up repository
|
- name: Dump GitHub context
|
||||||
uses: actions/checkout@v3
|
env:
|
||||||
|
GITHUB_CONTEXT: ${{ toJson(github) }}
|
||||||
|
run: echo "$GITHUB_CONTEXT"
|
||||||
|
- uses: actions/checkout@v2.2.0
|
||||||
with:
|
with:
|
||||||
submodules: false
|
submodules: false
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
- name: Set up python
|
- name: Set up Python 3
|
||||||
uses: actions/setup-python@v4
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: 3.x
|
python-version: "3.x"
|
||||||
- name: Set up submodules
|
- name: Get CP deps
|
||||||
uses: ./.github/actions/deps/submodules
|
run: python tools/ci_fetch_deps.py mpy-cross-mac ${{ github.sha }}
|
||||||
|
- name: CircuitPython version
|
||||||
|
run: |
|
||||||
|
tools/describe || git log --parents HEAD~4..
|
||||||
|
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
brew install gettext
|
||||||
|
echo >>$GITHUB_PATH /usr/local/opt/gettext/bin
|
||||||
- name: Versions
|
- name: Versions
|
||||||
run: |
|
run: |
|
||||||
gcc --version
|
gcc --version
|
||||||
@ -125,172 +174,353 @@ jobs:
|
|||||||
msgfmt --version
|
msgfmt --version
|
||||||
- name: Build mpy-cross
|
- name: Build mpy-cross
|
||||||
run: make -C mpy-cross -j2
|
run: make -C mpy-cross -j2
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: mpy-cross-macos-11-x64
|
name: mpy-cross-macos-catalina
|
||||||
path: mpy-cross/build/mpy-cross
|
path: mpy-cross/mpy-cross
|
||||||
|
- name: Select SDK for M1 build
|
||||||
|
run: sudo xcode-select -switch /Applications/Xcode_12.3.app
|
||||||
- name: Build mpy-cross (arm64)
|
- name: Build mpy-cross (arm64)
|
||||||
run: make -C mpy-cross -j2 -f Makefile.m1 V=2
|
run: make -C mpy-cross -j2 -f Makefile.m1 V=2
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: mpy-cross-macos-11-arm64
|
name: mpy-cross-macos-bigsur-arm64
|
||||||
path: mpy-cross/build-arm64/mpy-cross-arm64
|
path: mpy-cross/mpy-cross-arm64
|
||||||
- name: Make universal binary
|
- name: Make universal binary
|
||||||
run: lipo -create -output mpy-cross-macos-universal mpy-cross/build/mpy-cross mpy-cross/build-arm64/mpy-cross-arm64
|
run: lipo -create -output mpy-cross-macos-universal mpy-cross/mpy-cross mpy-cross/mpy-cross-arm64
|
||||||
- name: Upload artifact
|
- uses: actions/upload-artifact@v2
|
||||||
uses: actions/upload-artifact@v3
|
|
||||||
with:
|
with:
|
||||||
name: mpy-cross-macos-11-universal
|
name: mpy-cross-macos-universal
|
||||||
path: mpy-cross-macos-universal
|
path: mpy-cross-macos-universal
|
||||||
- name: Upload to S3
|
- name: Upload mpy-cross build to S3
|
||||||
if: >-
|
|
||||||
(github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') ||
|
|
||||||
(github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
|
|
||||||
run: |
|
run: |
|
||||||
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross-macos-universal s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-universal --no-progress --region us-east-1
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross-macos-universal s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-universal-${{ env.CP_VERSION }} --no-progress --region us-east-1
|
||||||
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/build-arm64/mpy-cross-arm64 s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-arm64 --no-progress --region us-east-1
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross-arm64 s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-bigsur-${{ env.CP_VERSION }}-arm64 --no-progress --region us-east-1
|
||||||
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/build/mpy-cross s3://adafruit-circuit-python/bin/mpy-cross/macos-11/mpy-cross-macos-11-${{ env.CP_VERSION }}-x64 --no-progress --region us-east-1
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross-macos-catalina-${{ env.CP_VERSION }} --no-progress --region us-east-1
|
||||||
env:
|
env:
|
||||||
AWS_PAGER: ''
|
AWS_PAGER: ''
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
|
||||||
|
|
||||||
docs:
|
|
||||||
runs-on: ubuntu-22.04
|
build-doc:
|
||||||
needs: scheduler
|
runs-on: ubuntu-20.04
|
||||||
if: needs.scheduler.outputs.docs == 'True'
|
needs: test
|
||||||
env:
|
if: ${{ needs.test.outputs.build-doc == 'True' }}
|
||||||
CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set up repository
|
- uses: actions/checkout@v2.2.0
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
with:
|
||||||
submodules: false
|
submodules: false
|
||||||
fetch-depth: 1
|
fetch-depth: 1
|
||||||
- name: Set up python
|
- name: Get CP deps
|
||||||
uses: actions/setup-python@v4
|
run: python tools/ci_fetch_deps.py docs ${{ github.sha }}
|
||||||
|
- name: CircuitPython version
|
||||||
|
run: |
|
||||||
|
tools/describe || git log --parents HEAD~4..
|
||||||
|
echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
|
||||||
|
- name: Set up Python 3
|
||||||
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
python-version: 3.x
|
python-version: "3.x"
|
||||||
- name: Set up submodules
|
|
||||||
uses: ./.github/actions/deps/submodules
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y latexmk librsvg2-bin texlive-fonts-recommended texlive-latex-recommended texlive-latex-extra
|
sudo apt-get install -y eatmydata
|
||||||
pip install -r requirements-doc.txt
|
sudo eatmydata apt-get install -y latexmk librsvg2-bin texlive-fonts-recommended texlive-latex-recommended texlive-latex-extra
|
||||||
|
pip install -r requirements-ci.txt -r requirements-doc.txt
|
||||||
- name: Build and Validate Stubs
|
- name: Build and Validate Stubs
|
||||||
run: make check-stubs -j2
|
run: make check-stubs -j2
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: stubs
|
name: stubs
|
||||||
path: circuitpython-stubs/dist/*
|
path: circuitpython-stubs/dist/*
|
||||||
- name: Test Documentation Build (HTML)
|
- name: Test Documentation Build (HTML)
|
||||||
run: sphinx-build -E -W -b html -D version=${{ env.CP_VERSION }} -D release=${{ env.CP_VERSION }} . _build/html
|
run: sphinx-build -E -W -b html -D version=${{ env.CP_VERSION }} -D release=${{ env.CP_VERSION }} . _build/html
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: docs
|
name: docs
|
||||||
path: _build/html
|
path: _build/html
|
||||||
- name: Test Documentation Build (LaTeX/PDF)
|
- name: Test Documentation Build (LaTeX/PDF)
|
||||||
run: |
|
run: |
|
||||||
make latexpdf
|
make latexpdf
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v2
|
||||||
with:
|
with:
|
||||||
name: docs
|
name: docs
|
||||||
path: _build/latex
|
path: _build/latex
|
||||||
- name: Upload to S3
|
- name: Upload stubs to S3
|
||||||
uses: ./.github/actions/upload_aws
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
|
||||||
with:
|
env:
|
||||||
source: circuitpython-stubs/dist/*.tar.gz
|
AWS_PAGER: ''
|
||||||
destination: stubs/circuitpython-stubs-${{ env.CP_VERSION }}.tar.gz
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
run: |
|
||||||
|
zip -9r circuitpython-stubs.zip circuitpython-stubs
|
||||||
|
[ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp circuitpython-stubs/dist/*.tar.gz s3://adafruit-circuit-python/bin/stubs/circuitpython-stubs-${{ env.CP_VERSION }}.zip --no-progress --region us-east-1
|
||||||
- name: Upload stubs to PyPi
|
- name: Upload stubs to PyPi
|
||||||
if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
|
if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
|
||||||
env:
|
env:
|
||||||
TWINE_USERNAME: ${{ secrets.pypi_username }}
|
TWINE_USERNAME: ${{ secrets.pypi_username }}
|
||||||
TWINE_PASSWORD: ${{ secrets.pypi_password }}
|
TWINE_PASSWORD: ${{ secrets.pypi_password }}
|
||||||
run: |
|
run: |
|
||||||
# python -m build was run by 'make stubs'
|
# setup.py sdist was run by 'make stubs'
|
||||||
[ -z "$TWINE_USERNAME" ] || echo "Uploading dev release to PyPi"
|
[ -z "$TWINE_USERNAME" ] || echo "Uploading dev release to PyPi"
|
||||||
[ -z "$TWINE_USERNAME" ] || twine upload circuitpython-stubs/dist/*
|
[ -z "$TWINE_USERNAME" ] || twine upload circuitpython-stubs/dist/*
|
||||||
|
|
||||||
windows:
|
|
||||||
runs-on: windows-2022
|
|
||||||
needs: scheduler
|
|
||||||
if: needs.scheduler.outputs.windows == 'True'
|
|
||||||
env:
|
|
||||||
CP_VERSION: ${{ needs.scheduler.outputs.cp-version }}
|
|
||||||
defaults:
|
|
||||||
run:
|
|
||||||
# We define a custom shell script here, although `msys2.cmd` does neither exist nor is it available in the PATH yet
|
|
||||||
shell: msys2 {0}
|
|
||||||
steps:
|
|
||||||
# We want to change the configuration of the git command that actions/checkout will be using
|
|
||||||
# (since it is not possible to set autocrlf through the action yet, see actions/checkout#226).
|
|
||||||
- run: git config --global core.autocrlf input
|
|
||||||
shell: bash
|
|
||||||
- name: Check python coding (cmd)
|
|
||||||
run: python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
|
|
||||||
shell: cmd
|
|
||||||
# We use a JS Action, which calls the system terminal or other custom terminals directly, if required
|
|
||||||
- uses: msys2/setup-msys2@v2
|
|
||||||
with:
|
|
||||||
install: base-devel git wget unzip gcc python-pip
|
|
||||||
# The goal of this was to test how things worked when the default file encoding (locale.getpreferedencoding())
|
|
||||||
# was not UTF-8. However, msys2 python does use utf-8 as the preferred file encoding, and using actions/setup-python
|
|
||||||
# python3.8 gave a broken build, so we're not really testing what we wanted to test.
|
|
||||||
# However, commandline length limits are being tested so that does some good.
|
|
||||||
- name: Check python coding (msys2)
|
|
||||||
run: |
|
|
||||||
locale -v
|
|
||||||
which python; python --version
|
|
||||||
python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
|
|
||||||
which python3; python3 --version
|
|
||||||
python3 -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
wget --no-verbose -O gcc-arm.zip https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip
|
|
||||||
unzip -q -d /tmp gcc-arm.zip
|
|
||||||
tar -C /tmp/gcc-arm-none-* -cf - . | tar -C /usr/local -xf -
|
|
||||||
pip install wheel
|
|
||||||
# requirements_dev.txt doesn't install on windows. (with msys2 python)
|
|
||||||
# instead, pick a subset for what we want to do
|
|
||||||
pip install cascadetoml jinja2 typer click intelhex
|
|
||||||
# check that installed packages work....?
|
|
||||||
which python; python --version; python -c "import cascadetoml"
|
|
||||||
which python3; python3 --version; python3 -c "import cascadetoml"
|
|
||||||
- name: Set up repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: false
|
|
||||||
fetch-depth: 1
|
|
||||||
- name: Set up submodules
|
|
||||||
uses: ./.github/actions/deps/submodules
|
|
||||||
- name: build mpy-cross
|
|
||||||
run: make -j2 -C mpy-cross
|
|
||||||
- name: build rp2040
|
|
||||||
run: make -j2 -C ports/raspberrypi BOARD=adafruit_feather_rp2040 TRANSLATION=de_DE
|
|
||||||
- name: build samd21
|
|
||||||
run: make -j2 -C ports/atmel-samd BOARD=feather_m0_express TRANSLATION=zh_Latn_pinyin
|
|
||||||
- name: build samd51
|
|
||||||
run: make -j2 -C ports/atmel-samd BOARD=feather_m4_express TRANSLATION=es
|
|
||||||
- name: build nrf
|
|
||||||
run: make -j2 -C ports/nrf BOARD=feather_nrf52840_express TRANSLATION=fr
|
|
||||||
- name: build stm
|
|
||||||
run: make -j2 -C ports/stm BOARD=feather_stm32f405_express TRANSLATION=pt_BR
|
|
||||||
# I gave up trying to do esp builds on windows when I saw
|
|
||||||
# ERROR: Platform MINGW64_NT-10.0-17763-x86_64 appears to be unsupported
|
|
||||||
# https://github.com/espressif/esp-idf/issues/7062
|
|
||||||
|
|
||||||
ports:
|
build-arm:
|
||||||
needs: [scheduler, mpy-cross, tests]
|
runs-on: ubuntu-20.04
|
||||||
if: needs.scheduler.outputs.ports != '{}'
|
needs: test
|
||||||
uses: ./.github/workflows/build-boards.yml
|
|
||||||
secrets: inherit
|
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
port: ${{ fromJSON(needs.scheduler.outputs.ports).ports }}
|
board: ${{ fromJSON(needs.test.outputs.boards-arm) }}
|
||||||
|
if: ${{ needs.test.outputs.boards-arm != '[]' }}
|
||||||
|
steps:
|
||||||
|
- name: Set up Python 3
|
||||||
|
uses: actions/setup-python@v2
|
||||||
with:
|
with:
|
||||||
boards: ${{ toJSON(fromJSON(needs.scheduler.outputs.ports)[matrix.port]) }}
|
python-version: "3.x"
|
||||||
cp-version: ${{ needs.scheduler.outputs.cp-version }}
|
- uses: actions/checkout@v2.2.0
|
||||||
|
with:
|
||||||
|
submodules: false
|
||||||
|
fetch-depth: 1
|
||||||
|
- name: Get CP deps
|
||||||
|
run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get install -y gettext
|
||||||
|
pip install -r requirements-ci.txt -r requirements-dev.txt
|
||||||
|
wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
|
||||||
|
sudo tar -C /usr --strip-components=1 -xaf gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
|
||||||
|
- name: Versions
|
||||||
|
run: |
|
||||||
|
gcc --version
|
||||||
|
arm-none-eabi-gcc --version
|
||||||
|
python3 --version
|
||||||
|
- name: mpy-cross
|
||||||
|
run: make -C mpy-cross -j2
|
||||||
|
- name: Setup build failure matcher
|
||||||
|
run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
|
||||||
|
- name: build
|
||||||
|
run: python3 -u build_release_files.py
|
||||||
|
working-directory: tools
|
||||||
|
env:
|
||||||
|
BOARDS: ${{ matrix.board }}
|
||||||
|
- uses: actions/upload-artifact@v2
|
||||||
|
with:
|
||||||
|
name: ${{ matrix.board }}
|
||||||
|
path: bin/${{ matrix.board }}
|
||||||
|
- name: Upload to S3
|
||||||
|
run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
|
||||||
|
env:
|
||||||
|
AWS_PAGER: ''
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||||
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
|
||||||
|
|
||||||
|
|
||||||
|
  build-riscv:
    runs-on: ubuntu-20.04
    needs: test
    strategy:
      fail-fast: false
      matrix:
        board: ${{ fromJSON(needs.test.outputs.boards-riscv) }}
    if: ${{ needs.test.outputs.boards-riscv != '[]' }}
    steps:
    - name: Set up Python 3
      uses: actions/setup-python@v2
      with:
        python-version: "3.x"
    - uses: actions/checkout@v2.2.0
      with:
        submodules: false
        fetch-depth: 1
    - name: Get CP deps
      run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
    - name: Install dependencies
      run: |
        sudo apt-get install -y gettext
        pip install -r requirements-ci.txt -r requirements-dev.txt
        wget https://static.dev.sifive.com/dev-tools/riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
        sudo tar -C /usr --strip-components=1 -xaf riscv64-unknown-elf-gcc-8.3.0-2019.08.0-x86_64-linux-centos6.tar.gz
    - name: Versions
      run: |
        gcc --version
        riscv64-unknown-elf-gcc --version
        python3 --version
    - name: mpy-cross
      run: make -C mpy-cross -j2
    - name: Setup build failure matcher
      run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
    - name: build
      run: python3 -u build_release_files.py
      working-directory: tools
      env:
        BOARDS: ${{ matrix.board }}
    - uses: actions/upload-artifact@v2
      with:
        name: ${{ matrix.board }}
        path: bin/${{ matrix.board }}
    - name: Upload to S3
      run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
      env:
        AWS_PAGER: ''
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
  build-espressif:
    runs-on: ubuntu-20.04
    needs: test
    strategy:
      fail-fast: false
      matrix:
        board: ${{ fromJSON(needs.test.outputs.boards-espressif) }}
    if: ${{ needs.test.outputs.boards-espressif != '[]' }}
    steps:
    - name: Set up Python 3
      uses: actions/setup-python@v2
      with:
        python-version: "3.x"
    - uses: actions/checkout@v2.2.0
      with:
        submodules: false
        fetch-depth: 1
    - name: Get CP deps
      run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
    - name: CircuitPython version
      run: |
        tools/describe || git log --parents HEAD~4..
        echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
    - uses: actions/cache@v2
      name: Fetch IDF tool cache
      id: idf-cache
      with:
        path: ${{ github.workspace }}/.idf_tools
        key: ${{ runner.os }}-idf-tools-${{ hashFiles('.git/modules/ports/espressif/esp-idf/HEAD') }}-20220404
    - name: Clone IDF submodules
      run: |
        (cd $IDF_PATH && git submodule update --init)
      env:
        IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
    - name: Install IDF tools
      run: |
        $IDF_PATH/tools/idf_tools.py --non-interactive install required
        $IDF_PATH/tools/idf_tools.py --non-interactive install cmake
        $IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
        rm -rf $IDF_TOOLS_PATH/dist
      env:
        IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
        IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
    - name: Install dependencies
      run: |
        source $IDF_PATH/export.sh
        sudo apt-get install -y gettext ninja-build
        pip install -r requirements-ci.txt -r requirements-dev.txt
      env:
        IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
        IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
    - name: Versions
      run: |
        source $IDF_PATH/export.sh
        gcc --version
        xtensa-esp32s2-elf-gcc --version
        python3 --version
        ninja --version
        cmake --version
      shell: bash
      env:
        IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
        IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
    - name: mpy-cross
      run: make -C mpy-cross -j2
    - name: Setup build failure matcher
      run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
    - name: build
      run: |
        source $IDF_PATH/export.sh
        python3 -u build_release_files.py
      working-directory: tools
      shell: bash
      env:
        IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
        IDF_TOOLS_PATH: ${{ github.workspace }}/.idf_tools
        BOARDS: ${{ matrix.board }}
    - uses: actions/upload-artifact@v2
      with:
        name: ${{ matrix.board }}
        path: bin/${{ matrix.board }}
    - name: Upload to S3
      run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
      env:
        AWS_PAGER: ''
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
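The IDF steps above map to roughly this local sequence. A sketch, assuming the esp-idf submodule is already checked out at ports/espressif/esp-idf and using an example board name:

    export IDF_PATH=$PWD/ports/espressif/esp-idf
    export IDF_TOOLS_PATH=$PWD/.idf_tools
    (cd $IDF_PATH && git submodule update --init)
    $IDF_PATH/tools/idf_tools.py --non-interactive install required
    $IDF_PATH/tools/idf_tools.py --non-interactive install cmake
    $IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
    source $IDF_PATH/export.sh
    make -C mpy-cross -j2
    (cd tools && BOARDS=adafruit_metro_esp32s2 python3 -u build_release_files.py)  # board is an example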
  build-aarch:
    runs-on: ubuntu-20.04
    needs: test
    strategy:
      fail-fast: false
      matrix:
        board: ${{ fromJSON(needs.test.outputs.boards-aarch) }}
    if: ${{ needs.test.outputs.boards-aarch != '[]' }}
    steps:
    - name: Set up Python 3
      uses: actions/setup-python@v2
      with:
        python-version: "3.x"
    - uses: actions/checkout@v2.2.0
      with:
        submodules: false
        fetch-depth: 1
    - name: Get CP deps
      run: python tools/ci_fetch_deps.py ${{ matrix.board }} ${{ github.sha }}
    - name: Install dependencies
      run: |
        sudo apt-get install -y gettext mtools
        pip install -r requirements-ci.txt -r requirements-dev.txt
        wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
        sudo tar -C /usr --strip-components=1 -xaf gcc-arm-10.3-2021.07-x86_64-aarch64-none-elf.tar.xz
        wget --no-verbose https://adafruit-circuit-python.s3.amazonaws.com/gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
        sudo tar -C /usr --strip-components=1 -xaf gcc-arm-none-eabi-10-2020-q4-major-x86_64-linux.tar.bz2
    - name: Install mkfs.fat
      run: |
        wget https://github.com/dosfstools/dosfstools/releases/download/v4.2/dosfstools-4.2.tar.gz
        tar -xaf dosfstools-4.2.tar.gz
        cd dosfstools-4.2
        ./configure
        make -j 2
        cd src
        echo >>$GITHUB_PATH $(pwd)
    - name: Versions
      run: |
        gcc --version
        aarch64-none-elf-gcc --version
        arm-none-eabi-gcc --version
        python3 --version
        mkfs.fat --version || true
    - name: mpy-cross
      run: make -C mpy-cross -j2
    - name: Setup build failure matcher
      run: echo "::add-matcher::$GITHUB_WORKSPACE/.github/workflows/match-build-fail.json"
    - name: build
      run: python3 -u build_release_files.py
      working-directory: tools
      env:
        BOARDS: ${{ matrix.board }}
    - uses: actions/upload-artifact@v2
      with:
        name: ${{ matrix.board }}
        path: bin/${{ matrix.board }}
    - name: Upload to S3
      run: "[ -z \"$AWS_ACCESS_KEY_ID\" ] || aws s3 cp bin/ s3://adafruit-circuit-python/bin/ --recursive --no-progress --region us-east-1"
      env:
        AWS_PAGER: ''
        AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
        AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
@ -10,34 +10,37 @@ on:
 jobs:
   website:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-20.04
     steps:
     - name: Dump GitHub context
-      run: echo "$GITHUB_CONTEXT"
       env:
         GITHUB_CONTEXT: ${{ toJson(github) }}
+      run: echo "$GITHUB_CONTEXT"
-    - name: Set up repository
-      uses: actions/checkout@v3
+    - uses: actions/checkout@v2.2.0
       with:
         submodules: false
         fetch-depth: 1
-    - name: Set up python
-      uses: actions/setup-python@v4
+    - name: Set up Python 3
+      uses: actions/setup-python@v2
       with:
-        python-version: 3.x
+        python-version: "3.x"
-    - name: Set up submodules
-      uses: ./.github/actions/deps/submodules
-      with:
-        version: true
-    - name: Set up external
-      uses: ./.github/actions/deps/external
+    - name: Get CP deps
+      run: python tools/ci_fetch_deps.py website ${{ github.sha }}
+    - name: Install deps
+      run: |
+        pip install -r requirements-dev.txt
     - name: Versions
       run: |
         gcc --version
         python3 --version
+    - name: CircuitPython version
+      run: |
+        tools/describe || git log --parents HEAD~4..
+        echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)
     - name: Website
       run: python3 build_board_info.py
       working-directory: tools
       env:
         RELEASE_TAG: ${{ github.event.release.tag_name }}
-        ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.ADABOT_GITHUB_ACCESS_TOKEN }}
+        ADABOT_GITHUB_ACCESS_TOKEN: ${{ secrets.BLINKA_GITHUB_ACCESS_TOKEN }}
+    if: github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested')
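Either variant of the website job boils down to a short local run. A sketch, with placeholder values for the git ref, release tag and token:

    python tools/ci_fetch_deps.py website HEAD       # ref is a placeholder
    pip install -r requirements-dev.txt
    cd tools
    RELEASE_TAG=<tag> ADABOT_GITHUB_ACCESS_TOKEN=<token> python3 build_board_info.py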
90  .github/workflows/custom-board-build.yml (vendored)
@ -1,90 +0,0 @@
name: Custom board build

on:
  workflow_dispatch:
    inputs:
      board:
        description: 'Board: Found in ports/*/boards/[board_id]'
        required: true
        type: string
      version:
        description: 'Version: Can be a tag or a commit (>=8.1.0)'
        required: false
        default: latest
        type: string
      language:
        description: 'Language: Found in locale/[language].po'
        required: false
        default: en_US
        type: string
      flags:
        description: 'Flags: Build flags (e.g. CIRCUITPY_WIFI=1)'
        required: false
        type: string
      debug:
        description: 'Make a debug build'
        required: false
        default: false
        type: boolean

run-name: ${{ inputs.board }}-${{ inputs.language }}-${{ inputs.version }}${{ inputs.flags != '' && '-custom' || '' }}${{ inputs.debug && '-debug' || '' }}

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
    - name: Set up repository
      run: |
        git clone --filter=tree:0 https://github.com/adafruit/circuitpython.git $GITHUB_WORKSPACE
        git checkout ${{ inputs.version == 'latest' && 'HEAD' || inputs.version }}
    - name: Set up identifier
      if: inputs.debug || inputs.flags != ''
      run: |
        > custom-build && git add custom-build
    - name: Set up python
      uses: actions/setup-python@v4
      with:
        python-version: 3.x
    - name: Set up port
      id: set-up-port
      uses: ./.github/actions/deps/ports
      with:
        board: ${{ inputs.board }}
    - name: Set up submodules
      id: set-up-submodules
      uses: ./.github/actions/deps/submodules
      with:
        action: cache
        target: ${{ inputs.board }}
    - name: Set up external
      uses: ./.github/actions/deps/external
      with:
        action: cache
        port: ${{ steps.set-up-port.outputs.port }}
    - name: Set up mpy-cross
      if: steps.set-up-submodules.outputs.frozen == 'True'
      uses: ./.github/actions/mpy_cross
      with:
        cp-version: ${{ steps.set-up-submodules.outputs.version }}
        download: false
    - name: Versions
      run: |
        tools/describe
        gcc --version
        python3 --version
        cmake --version || true
        ninja --version || true
        aarch64-none-elf-gcc --version || true
        arm-none-eabi-gcc --version || true
        xtensa-esp32-elf-gcc --version || true
        riscv32-esp-elf-gcc --version || true
        riscv64-unknown-elf-gcc --version || true
        mkfs.fat --version || true
    - name: Build board
      run: make -j2 ${{ inputs.flags }} BOARD=${{ inputs.board }} DEBUG=${{ inputs.debug && '1' || '0' }} TRANSLATION=${{ inputs.language }}
      working-directory: ports/${{ steps.set-up-port.outputs.port }}
    - name: Upload artifact
      uses: actions/upload-artifact@v3
      with:
        name: ${{ inputs.board }}-${{ inputs.language }}-${{ inputs.version }}${{ inputs.flags != '' && '-custom' || '' }}${{ inputs.debug && '-debug' || '' }}
        path: ports/${{ steps.set-up-port.outputs.port }}/build-${{ inputs.board }}/firmware.*
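Because this is a plain workflow_dispatch workflow, it can also be started from the command line. A sketch, assuming the GitHub CLI (gh) is installed and authenticated, with the board id only as an example:

    # Board id is an example; any directory name under ports/*/boards/ works.
    gh workflow run custom-board-build.yml \
      -f board=feather_m4_express \
      -f version=latest \
      -f language=en_US \
      -f debug=false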
18  .github/workflows/notify-on-issue-label.yml (vendored)
@ -1,18 +0,0 @@
name: Notify users based on issue labels

on:
  issues:
    types: [labeled]

jobs:
  notify:
    runs-on: ubuntu-latest
    permissions:
      issues: write
    steps:
    - uses: tekktrik/issue-labeled-ping@v1
      with:
        github-token: ${{ secrets.GITHUB_TOKEN }}
        user: v923z
        label: ulab
        message: Heads up {user} - the "{label}" label was applied to this issue.
14  .github/workflows/notify.yml (vendored, new file)
@ -0,0 +1,14 @@
name: Notify users based on issue labels

on:
  issues:
    types: [labeled]

jobs:
  notify:
    runs-on: ubuntu-latest
    steps:
    - uses: jenschelkopf/issue-label-notification-action@1.3
      with:
        recipients: |
          ulab=@v923z
113  .github/workflows/ports_windows.yml (vendored, new file)
@ -0,0 +1,113 @@
name: windows port

on:
  push:
  pull_request:
    paths:
    - '.github/workflows/*.yml'
    - 'extmod/**'
    - 'lib/**'
    - 'mpy-cross/**'
    - 'ports/unix/**'
    - 'ports/windows/**'
    - 'py/**'
    - 'requirements*.txt'
    - 'tools/**'

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: windows-2019
    defaults:
      run:
        # We define a custom shell script here, although `msys2.cmd` does neither exist nor is it available in the PATH yet
        shell: msys2 {0}
    steps:
    # We want to change the configuration of the git command that actions/checkout will be using (since it is not possible to set autocrlf through the action yet, see actions/checkout#226).
    - run: git config --global core.autocrlf input
      shell: bash

    - name: Check python coding (cmd)
      run: |
        python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
      shell: cmd

    # We use a JS Action, which calls the system terminal or other custom terminals directly, if required
    - uses: msys2/setup-msys2@v2
      with:
        update: true
        install: base-devel git wget unzip gcc python-pip

    # The goal of this was to test how things worked when the default file
    # encoding (locale.getpreferedencoding()) was not UTF-8. However, msys2
    # python does use utf-8 as the preferred file encoding, and using
    # actions/setup-python python3.8 gave a broken build, so we're not really
    # testing what we wanted to test.
    #
    # however, commandline length limits are being tested so that does some
    # good.
    - name: Check python coding (msys2)
      run: |
        locale -v
        which python; python --version
        python -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"
        which python3; python3 --version
        python3 -c "import sys, locale; print(sys.getdefaultencoding(), locale.getpreferredencoding(False))"

    - name: Install dependencies
      run: |
        wget --no-verbose -O gcc-arm.zip https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip
        unzip -q -d /tmp gcc-arm.zip
        tar -C /tmp/gcc-arm-none-* -cf - . | tar -C /usr/local -xf -
        pip install wheel
        # requirements_dev.txt doesn't install on windows. (with msys2 python)
        # instead, pick a subset for what we want to do
        # Undo the pin of typer & click when undoing it in requirements-dev.txt
        pip install cascadetoml jinja2 typer==0.4.0 click==8.0.4 intelhex
        # check that installed packages work....?
        which python; python --version; python -c "import cascadetoml"
        which python3; python3 --version; python3 -c "import cascadetoml"

    - uses: actions/checkout@v2.2.0
      with:
        submodules: false
        fetch-depth: 1
    - name: Get CP deps
      run: python tools/ci_fetch_deps.py windows ${{ github.sha }}
    - name: CircuitPython version
      run: |
        tools/describe || git log --parents HEAD~4..
        echo >>$GITHUB_ENV CP_VERSION=$(tools/describe)

    - name: build mpy-cross
      run: make -j2 -C mpy-cross

    - name: build rp2040
      run: make -j2 -C ports/raspberrypi BOARD=adafruit_feather_rp2040 TRANSLATION=de_DE

    - name: build samd21
      run: make -j2 -C ports/atmel-samd BOARD=feather_m0_express TRANSLATION=zh_Latn_pinyin

    - name: build samd51
      run: make -j2 -C ports/atmel-samd BOARD=feather_m4_express TRANSLATION=es

    - name: build nrf
      run: make -j2 -C ports/nrf BOARD=feather_nrf52840_express TRANSLATION=fr

    - name: build stm
      run: make -j2 -C ports/stm BOARD=feather_stm32f405_express TRANSLATION=pt_BR

    # I gave up trying to do esp32 builds on windows when I saw
    # ERROR: Platform MINGW64_NT-10.0-17763-x86_64 appears to be unsupported
    # https://github.com/espressif/esp-idf/issues/7062
    #
    # - name: prepare esp
    #   run: ports/espressif/esp-idf/install.bat
    #   shell: cmd
    #
    # - name: build esp
    #   run: . ports/espressif/esp-idf/export.sh && make -j2 -C ports/espressif BOARD=adafruit_metro_esp32s2
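Outside of CI, the same Windows build can be attempted from an MSYS2 shell. A sketch, assuming MSYS2 is installed and the ARM toolchain zip above is still downloadable; the board and translation are just the ones the workflow happens to use:

    pacman -S --needed base-devel git wget unzip gcc python-pip
    wget --no-verbose -O gcc-arm.zip https://developer.arm.com/-/media/Files/downloads/gnu-rm/10-2020q4/gcc-arm-none-eabi-10-2020-q4-major-win32.zip
    unzip -q -d /tmp gcc-arm.zip
    tar -C /tmp/gcc-arm-none-* -cf - . | tar -C /usr/local -xf -
    pip install cascadetoml jinja2 typer==0.4.0 click==8.0.4 intelhex
    make -j2 -C mpy-cross
    make -j2 -C ports/atmel-samd BOARD=feather_m0_express TRANSLATION=zh_Latn_pinyin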
38  .github/workflows/pre-commit.yml (vendored)
@ -5,8 +5,8 @@
 name: pre-commit

 on:
-  push:
   pull_request:
+  push:

 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}

@ -14,33 +14,33 @@ concurrency:
 jobs:
   pre-commit:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-20.04
     steps:
-    - name: Set up repository
-      uses: actions/checkout@v3
-      with:
-        submodules: false
-        fetch-depth: 1
-    - name: Set up python
-      uses: actions/setup-python@v4
-      with:
-        python-version: 3.x
-    - name: Set up submodules
-      uses: ./.github/actions/deps/submodules
-    - name: Set up external
-      uses: ./.github/actions/deps/external
-    - name: Install dependencies
+    - uses: actions/checkout@v2.2.0
+    - name: Set up Python 3
+      uses: actions/setup-python@v2
+      with:
+        python-version: "3.x"
+    - name: Install deps
       run: |
-        sudo apt-get update
+        sudo apt-add-repository -y -u ppa:pybricks/ppa
         sudo apt-get install -y gettext uncrustify
-    - name: Run pre-commit
-      uses: pre-commit/action@v3.0.0
+        pip3 install black polib pyyaml
+    - name: Populate selected submodules
+      run: git submodule update --init extmod/ulab
+    - name: Set PY
+      run: echo >>$GITHUB_ENV PY="$(python -c 'import hashlib, sys;print(hashlib.sha256(sys.version.encode()+sys.executable.encode()).hexdigest())')"
+    - uses: actions/cache@v2
+      with:
+        path: ~/.cache/pre-commit
+        key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
+    - uses: pre-commit/action@v1.1.0
     - name: Make patch
       if: failure()
       run: git diff > ~/pre-commit.patch
     - name: Upload patch
       if: failure()
-      uses: actions/upload-artifact@v3
+      uses: actions/upload-artifact@v2
       with:
         name: patch
         path: ~/pre-commit.patch
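Either version of this job can be mirrored locally. A sketch, assuming pre-commit itself is installed via pip:

    sudo apt-get install -y gettext uncrustify
    pip3 install pre-commit black polib pyyaml
    git submodule update --init extmod/ulab
    pre-commit run --all-files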
66  .github/workflows/run-tests.yml (vendored)
@ -1,66 +0,0 @@
name: Run tests

on:
  workflow_call:
    inputs:
      cp-version:
        required: true
        type: string

jobs:
  run:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      matrix:
        test: [all, mpy, native, native_mpy]
    env:
      CP_VERSION: ${{ inputs.cp-version }}
      MICROPY_CPYTHON3: python3.8
      MICROPY_MICROPYTHON: ../ports/unix/build-coverage/micropython
      TEST_all:
      TEST_mpy: --via-mpy -d basics float micropython
      TEST_native: --emit native
      TEST_native_mpy: --via-mpy --emit native -d basics float micropython
    steps:
    - name: Set up repository
      uses: actions/checkout@v3
      with:
        submodules: false
        fetch-depth: 1
    - name: Set up python
      uses: actions/setup-python@v4
      with:
        python-version: 3.8
    - name: Set up submodules
      uses: ./.github/actions/deps/submodules
      with:
        target: tests
    - name: Set up external
      if: matrix.test == 'all'
      uses: ./.github/actions/deps/external
    - name: Set up mpy-cross
      uses: ./.github/actions/mpy_cross
      with:
        cp-version: ${{ inputs.cp-version }}
    - name: Build unix port
      run: make -C ports/unix VARIANT=coverage -j2
    - name: Run tests
      run: ./run-tests.py -j2 ${{ env[format('TEST_{0}', matrix.test)] }}
      working-directory: tests
    - name: Print failure info
      run: ./run-tests.py -j2 --print-failures
      if: failure()
      working-directory: tests
    - name: Build native modules
      if: matrix.test == 'all'
      run: |
        make -C examples/natmod/features1
        make -C examples/natmod/features2
        make -C examples/natmod/heapq
        make -C examples/natmod/random
        make -C examples/natmod/re
    - name: Test native modules
      if: matrix.test == 'all'
      run: ./run-natmodtests.py extmod/{heapq*,re*,zlib*}.py
      working-directory: tests
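The test matrix above translates into ordinary local commands. A sketch of the "mpy" variant, assuming the unix port builds on the host:

    make -C mpy-cross -j2
    make -C ports/unix VARIANT=coverage -j2
    cd tests
    MICROPY_MICROPYTHON=../ports/unix/build-coverage/micropython \
      ./run-tests.py -j2 --via-mpy -d basics float micropython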
1  .gitignore (vendored)
@ -9,7 +9,6 @@
 !atmel-samd/asf/**/*.a
 *.elf
 *.bin
-!*.toml.bin
 *.map
 *.hex
 *.dis
98  .gitmodules (vendored)
@ -33,6 +33,9 @@
     path = ports/atmel-samd/asf4
     url = https://github.com/adafruit/asf4.git
     branch = circuitpython
+[submodule "tools/usb_descriptor"]
+    path = tools/usb_descriptor
+    url = https://github.com/adafruit/usb_descriptor.git
 [submodule "lib/nrfutil"]
     path = lib/nrfutil
     url = https://github.com/adafruit/nRF52_nrfutil

@ -100,7 +103,7 @@
     url = https://github.com/adafruit/Adafruit_MP3
 [submodule "ports/mimxrt10xx/sdk"]
     path = ports/mimxrt10xx/sdk
-    url = https://github.com/nxp-mcuxpresso/mcux-sdk.git
+    url = https://github.com/adafruit/MIMXRT10xx_SDK
 [submodule "frozen/Adafruit_CircuitPython_Register"]
     path = frozen/Adafruit_CircuitPython_Register
     url = https://github.com/adafruit/Adafruit_CircuitPython_Register.git

@ -143,14 +146,10 @@
 [submodule "ports/espressif/esp-idf"]
     path = ports/espressif/esp-idf
     url = https://github.com/adafruit/esp-idf.git
-    branch = circuitpython-v5.1
+    branch = circuitpython-v4.4
-[submodule "ports/espressif/esp-protocols"]
-    path = ports/espressif/esp-protocols
-    url = https://github.com/espressif/esp-protocols.git
-[submodule "ports/espressif/esp-camera"]
-    path = ports/espressif/esp-camera
-    url = https://github.com/adafruit/esp32-camera.git
-    branch = circuitpython
+[submodule "ports/espressif/certificates/nina-fw"]
+    path = ports/espressif/certificates/nina-fw
+    url = https://github.com/adafruit/nina-fw.git
 [submodule "frozen/Adafruit_CircuitPython_ST7789"]
     path = frozen/Adafruit_CircuitPython_ST7789
     url = https://github.com/adafruit/Adafruit_CircuitPython_ST7789

@ -188,14 +187,15 @@
 [submodule "frozen/Adafruit_CircuitPython_APDS9960"]
     path = frozen/Adafruit_CircuitPython_APDS9960
     url = https://github.com/adafruit/Adafruit_CircuitPython_APDS9960
+[submodule "ports/broadcom/peripherals"]
+    path = ports/broadcom/peripherals
+    url = https://github.com/adafruit/broadcom-peripherals.git
+    branch = main-build
 [submodule "rpi-firmware"]
     path = ports/broadcom/firmware
     url = https://github.com/raspberrypi/rpi-firmware.git
     branch = master
     shallow = true
-[submodule "lib/adafruit_floppy"]
-    path = lib/adafruit_floppy
-    url = https://github.com/adafruit/Adafruit_Floppy
 [submodule "ports/stm/st_driver/cmsis_device_f4"]
     path = ports/stm/st_driver/cmsis_device_f4
     url = https://github.com/STMicroelectronics/cmsis_device_f4.git

@ -283,77 +283,3 @@
 [submodule "frozen/Adafruit_CircuitPython_FakeRequests"]
     path = frozen/Adafruit_CircuitPython_FakeRequests
     url = https://github.com/adafruit/Adafruit_CircuitPython_FakeRequests.git
-[submodule "frozen/pew-pewpew-lcd"]
-    path = frozen/pew-pewpew-lcd
-    url = https://github.com/pypewpew/pew-pewpew-lcd.git
-[submodule "frozen/mixgo_cp_lib"]
-    path = frozen/mixgo_cp_lib
-    url = https://github.com/dahanzimin/circuitpython_lib.git
-[submodule "frozen/Adafruit_CircuitPython_IS31FL3731"]
-    path = frozen/Adafruit_CircuitPython_IS31FL3731
-    url = https://github.com/adafruit/Adafruit_CircuitPython_IS31FL3731.git
-[submodule "frozen/Adafruit_CircuitPython_Ticks"]
-    path = frozen/Adafruit_CircuitPython_Ticks
-    url = https://github.com/adafruit/Adafruit_CircuitPython_Ticks.git
-[submodule "frozen/Adafruit_CircuitPython_asyncio"]
-    path = frozen/Adafruit_CircuitPython_asyncio
-    url = https://github.com/adafruit/Adafruit_CircuitPython_asyncio.git
-[submodule "frozen/circuitpython_ef_music"]
-    path = frozen/circuitpython_ef_music
-    url = https://github.com/elecfreaks/circuitpython_ef_music.git
-[submodule "frozen/circuitpython_picoed"]
-    path = frozen/circuitpython_picoed
-    url = https://github.com/elecfreaks/circuitpython_picoed.git
-[submodule "ports/raspberrypi/lib/cyw43-driver"]
-    path = ports/raspberrypi/lib/cyw43-driver
-    url = https://github.com/georgerobotics/cyw43-driver.git
-[submodule "ports/raspberrypi/lib/lwip"]
-    path = ports/raspberrypi/lib/lwip
-    url = https://github.com/adafruit/lwip.git
-    branch = circuitpython8
-[submodule "lib/mbedtls"]
-    path = lib/mbedtls
-    url = https://github.com/ARMmbed/mbedtls.git
-[submodule "frozen/Adafruit_CircuitPython_UC8151D"]
-    path = frozen/Adafruit_CircuitPython_UC8151D
-    url = https://github.com/adafruit/Adafruit_CircuitPython_UC8151D
-[submodule "frozen/Adafruit_CircuitPython_SSD1680"]
-    path = frozen/Adafruit_CircuitPython_SSD1680
-    url = https://github.com/adafruit/Adafruit_CircuitPython_SSD1680
-[submodule "ports/broadcom/peripherals"]
-    path = ports/broadcom/peripherals
-    url = https://github.com/adafruit/broadcom-peripherals.git
-    branch = main-build
-[submodule "ports/silabs/gecko_sdk"]
-    path = ports/silabs/gecko_sdk
-    url = https://github.com/SiliconLabs/gecko_sdk.git
-    branch = v4.2.1
-[submodule "ports/silabs/tools/slc_cli_linux"]
-    path = ports/silabs/tools/slc_cli_linux
-    url = https://github.com/SiliconLabs/circuitpython_slc_cli_linux
-[submodule "ports/raspberrypi/lib/PicoDVI"]
-    path = ports/raspberrypi/lib/PicoDVI
-    url = https://github.com/circuitpython/PicoDVI.git
-    branch = circuitpython
-[submodule "frozen/circuitpython-pcf85063a"]
-    path = frozen/circuitpython-pcf85063a
-    url = https://github.com/bablokb/circuitpython-pcf85063a
-[submodule "frozen/Adafruit_CircuitPython_Wave"]
-    path = frozen/Adafruit_CircuitPython_Wave
-    url = https://github.com/adafruit/Adafruit_CircuitPython_Wave.git
-[submodule "ports/raspberrypi/lib/Pico-PIO-USB"]
-    path = ports/raspberrypi/lib/Pico-PIO-USB
-    url = https://github.com/sekigon-gonnoc/Pico-PIO-USB.git
-    branch = main
-[submodule "lib/micropython-lib"]
-    path = lib/micropython-lib
-    url = https://github.com/micropython/micropython-lib.git
-[submodule "lib/certificates"]
-    path = lib/certificates
-    url = https://github.com/adafruit/certificates
-[submodule "lib/tlsf"]
-    path = lib/tlsf
-    url = https://github.com/espressif/tlsf.git
-[submodule "frozen/CircuitPython_AXP313A"]
-    path = frozen/CircuitPython_AXP313A
-    url = https://github.com/bill88t/CircuitPython_AXP313A
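With histories this divergent the submodule sets differ too, but individual submodules can still be fetched selectively instead of all at once. A sketch, using a path present on both branches; the shallow depth is optional:

    # Initialize just the ASF4 submodule rather than the whole set.
    git submodule sync ports/atmel-samd/asf4
    git submodule update --init --depth 1 ports/atmel-samd/asf4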
@ -8,22 +8,9 @@ repos:
     hooks:
       - id: check-yaml
       - id: end-of-file-fixer
-        exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*)'
+        exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*|ports/espressif/esp-idf-config/.*|ports/espressif/boards/.*/sdkconfig)'
       - id: trailing-whitespace
-        exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*|lib/mbedtls_errors/generate_errors.diff)'
+        exclude: '^(tests/.*\.exp|tests/cmdline/.*|tests/.*/data/.*)'
-  - repo: https://github.com/codespell-project/codespell
-    rev: v2.2.4
-    hooks:
-      - id: codespell
-        args: [-w]
-        exclude: |
-          (?x)^(
-            locale/|
-            lib/|
-            tests/unicode/data/utf-8_invalid.txt|
-            tests/extmod/data/qr.pgm|
-            tests/basics/bytearray_byte_operations.py
-          )
   - repo: local
     hooks:
       - id: translations
@ -12,9 +12,10 @@ build:
   os: ubuntu-20.04
   tools:
     python: "3"
-  jobs:
-    post_install:
-      - python tools/ci_fetch_deps.py docs
+submodules:
+  include:
+    - extmod/ulab

 formats:
   - pdf
20  BUILDING.md
@ -27,11 +27,7 @@ This project has a bunch of git submodules. You will need to update them regula

 In the root folder of the CircuitPython repository, execute the following:

-    make fetch-all-submodules
+    make fetch-submodules

-Or, in the ports directory for the particular port you are building, do:
-
-    make fetch-port-submodules
-
 ### Required Python Packages

@ -39,8 +35,6 @@ Failing to install these will prevent from properly building.

     pip3 install -r requirements-dev.txt

-If you run into an error installing minify_html, you may need to install `rust`.
-
 ### mpy-cross

@ -48,7 +42,7 @@ To compile (or recompile) mpy-cross:

     make -C mpy-cross

-## Building
+# Building

 There a number of ports of CircuitPython! To build for your board, change to the appropriate ports directory and build.

@ -64,7 +58,7 @@ If you aren't sure what boards exist, have a peek in the boards subdirectory of
 If you have a fast computer with many cores, consider adding `-j` to your build flags, such as `-j17` on
 a 6-core 12-thread machine.

-## Testing
+# Testing

 If you are working on changes to the core language, you might find it useful to run the test suite.
 The test suite in the top level `tests` directory. It needs the unix port to run.

@ -76,7 +70,7 @@ The test suite in the top level `tests` directory. It needs the unix port to ru
 Then you can run the test suite:

     cd ../../tests
-    ./run-tests.py
+    ./run-tests

 A successful run will say something like

@ -84,7 +78,7 @@ A successful run will say something like
     676 tests passed
     30 tests skipped: buffered_writer builtin_help builtin_range_binop class_delattr_setattr cmd_parsetree extra_coverage framebuf1 framebuf16 framebuf2 framebuf4 framebuf8 framebuf_subclass mpy_invalid namedtuple_asdict non_compliant resource_stream schedule sys_getsizeof urandom_extra ure_groups ure_span ure_sub ure_sub_unmatched vfs_basic vfs_fat_fileio1 vfs_fat_fileio2 vfs_fat_more vfs_fat_oldproto vfs_fat_ramdisk vfs_userfs

-## Debugging
+# Debugging

 The easiest way to debug CircuitPython on hardware is with a JLink device, JLinkGDBServer, and an appropriate GDB.
 Instructions can be found at https://learn.adafruit.com/debugging-the-samd21-with-gdb

@ -99,7 +93,7 @@ Example:
 If your port/build includes `arm-none-eabi-gdb-py`, consider using it instead, as it can be used for better register
 debugging with https://github.com/bnahill/PyCortexMDebug

-## Code Quality Checks
+# Code Quality Checks

 We apply code quality checks using pre-commit. Install pre-commit once per system with

@ -113,7 +107,7 @@ Pre-commit also requires some additional programs to be installed through your p
 * Standard Unix tools such as make, find, etc
 * The gettext package, any modern version
-* uncrustify version 0.71 (0.72 is also tested and OK; 0.75 is not OK)
+* uncrustify version 0.71 (0.72 is also tested)

 Each time you create a git commit, the pre-commit quality checks will be run. You can also run them e.g., with `pre-commit run foo.c` or `pre-commit run --all` to run on all files whether modified or not.
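Putting the BUILDING.md fragments from both branches together, a from-scratch build looks roughly like this. A sketch; the board name and -j value are examples only:

    pip3 install -r requirements-dev.txt
    make fetch-all-submodules        # 'make fetch-submodules' on the 7.2.x branch
    make -C mpy-cross
    cd ports/atmel-samd
    make -j8 BOARD=feather_m0_express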
@ -123,7 +123,7 @@ accordingly.

 ## Attribution

-This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org),
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
 version 1.4, available at
 <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>,
 and the [Rust Code of Conduct](https://www.rust-lang.org/en-US/conduct.html).
@ -38,9 +38,9 @@ For SAMD21 debugging workflow tips check out [this learn guide](https://learn.ad
 Scott Shawcroft ([@tannewt](https://github.com/tannewt)) is the lead developer of CircuitPython
 and is sponsored by [Adafruit Industries LLC](https://adafruit.com). Scott is usually available
 during US West Coast working hours. Dan Halbert ([@dhalbert](https://github.com/dhalbert)) and
-Jeff Epler ([@jepler](https://github.com/jepler)) are also sponsored by [Adafruit Industries
-LLC](https://adafruit.com) and are usually available during US daytime hours including some
-weekends.
+Kattni Rembor ([@kattni](https://github.com/kattni)) are also sponsored by [Adafruit Industries
+LLC](https://adafruit.com) and are usually available during US East Coast daytime hours including
+some weekends.

 They are all reachable on [Discord](https://adafru.it/discord), GitHub issues and the [Adafruit
 support forum](https://forums.adafruit.com/viewforum.php?f=60).
70  LICENSE
@ -1,6 +1,6 @@
 The MIT License (MIT)

-Copyright (c) 2013-2023 Damien P. George
+Copyright (c) 2013-2021 Damien P. George

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

@ -17,5 +17,69 @@ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
+--------------------------------------------------------------------------------
+
+Unless specified otherwise (see below), the above license and copyright applies
+to all files in this repository.
+
+Individual files may include additional copyright holders.
+
+The various ports of MicroPython may include third-party software that is
+licensed under different terms. These licenses are summarised in the tree
+below, please refer to these files and directories for further license and
+copyright information. Note that (L)GPL-licensed code listed below is only
+used during the build process and is not part of the compiled source code.
+
+/ (MIT)
+    /drivers
+        /cc3000 (BSD-3-clause)
+        /cc3100 (BSD-3-clause)
+        /wiznet5k (BSD-3-clause)
+    /lib
+        /asf4 (Apache-2.0)
+        /axtls (BSD-3-clause)
+            /config
+                /scripts
+                    /config (GPL-2.0-or-later)
+                /Rules.mak (GPL-2.0)
+        /berkeley-db-1xx (BSD-4-clause)
+        /btstack (See btstack/LICENSE)
+        /cmsis (BSD-3-clause)
+        /crypto-algorithms (NONE)
+        /libhydrogen (ISC)
+        /littlefs (BSD-3-clause)
+        /lwip (BSD-3-clause)
+        /mynewt-nimble (Apache-2.0)
+        /nrfx (BSD-3-clause)
+        /nxp_driver (BSD-3-Clause)
+        /oofatfs (BSD-1-clause)
+        /pico-sdk (BSD-3-clause)
+        /re15 (BSD-3-clause)
+        /stm32lib (BSD-3-clause)
+        /tinytest (BSD-3-clause)
+        /tinyusb (MIT)
+        /uzlib (Zlib)
+    /logo (uses OFL-1.1)
+    /ports
+        /cc3200
+            /hal (BSD-3-clause)
+            /simplelink (BSD-3-clause)
+            /FreeRTOS (GPL-2.0 with FreeRTOS exception)
+        /stm32
+            /usbd*.c (MCD-ST Liberty SW License Agreement V2)
+            /stm32_it.* (MIT + BSD-3-clause)
+            /system_stm32*.c (MIT + BSD-3-clause)
+            /boards
+                /startup_stm32*.s (BSD-3-clause)
+                /*/stm32*.h (BSD-3-clause)
+            /usbdev (MCD-ST Liberty SW License Agreement V2)
+            /usbhost (MCD-ST Liberty SW License Agreement V2)
+        /teensy
+            /core (PJRC.COM)
+        /zephyr
+            /src (Apache-2.0)
+    /tools
+        /dfu.py (LGPL-3.0-only)
@ -1,85 +0,0 @@
The MIT License (MIT)

Copyright (c) 2013-2022 Damien P. George

[The remaining lines of this removed file repeat the standard MIT permission text
and the same third-party license tree shown in the LICENSE hunk above.]
31  Makefile
@ -83,15 +83,13 @@ help:
     @echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
     @echo "  linkcheck  to check all external links for integrity"
     @echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
-    @echo "  fetch-all-submodules  to fetch submodules for all ports"
-    @echo "  remove-all-submodules  remove all submodules, including files and .git/ data"

 clean:
     rm -rf $(BUILDDIR)/*
     rm -rf autoapi
     rm -rf $(STUBDIR) $(DISTDIR) *.egg-info

-html:
+html: stubs
     $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
     @echo
     @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

@ -226,9 +224,8 @@ pseudoxml:
 .PHONY: all-source
 all-source:

-TRANSLATE_COMMAND=find $(TRANSLATE_SOURCES) -type d \( $(TRANSLATE_SOURCES_EXC) \) -prune -o -type f \( -iname "*.c" -o -iname "*.h" \) -print | (LC_ALL=C sort) | xgettext -x locale/synthetic.pot -f- -L C -s --add-location=file --keyword=MP_ERROR_TEXT -o - | sed -e '/"POT-Creation-Date: /d'
 locale/circuitpython.pot: all-source
-    $(TRANSLATE_COMMAND) > $@
+    find $(TRANSLATE_SOURCES) -type d \( $(TRANSLATE_SOURCES_EXC) \) -prune -o -type f \( -iname "*.c" -o -iname "*.h" \) -print | (LC_ALL=C sort) | xgettext -f- -L C -s --add-location=file --keyword=translate --keyword=MP_ERROR_TEXT -o - | sed -e '/"POT-Creation-Date: /d' > $@

 # Historically, `make translate` updated the .pot file and ran msgmerge.
 # However, this was a frequent source of merge conflicts. Weblate can perform

@ -253,7 +250,7 @@ merge-translate:

 .PHONY: check-translate
 check-translate:
-    $(TRANSLATE_COMMAND) > locale/circuitpython.pot.tmp
+    find $(TRANSLATE_SOURCES) -type d \( $(TRANSLATE_SOURCES_EXC) \) -prune -o -type f \( -iname "*.c" -o -iname "*.h" \) -print | (LC_ALL=C sort) | xgettext -f- -L C -s --add-location=file --keyword=translate --keyword=MP_ERROR_TEXT -o circuitpython.pot.tmp -p locale
     $(PYTHON) tools/check_translations.py locale/circuitpython.pot.tmp locale/circuitpython.pot; status=$$?; rm -f locale/circuitpython.pot.tmp; exit $$status

 .PHONY: stubs

@ -268,7 +265,7 @@ stubs:
     @cp setup.py-stubs circuitpython-stubs/setup.py
     @cp README.rst-stubs circuitpython-stubs/README.rst
     @cp MANIFEST.in-stubs circuitpython-stubs/MANIFEST.in
-    @$(PYTHON) -m build circuitpython-stubs
+    @(cd circuitpython-stubs && $(PYTHON) setup.py -q sdist)

 .PHONY: check-stubs
 check-stubs: stubs

@ -325,16 +322,10 @@ clean-nrf:
 clean-stm:
     $(MAKE) -C ports/stm BOARD=feather_stm32f405_express clean

-.PHONY: fetch-all-submodules
-fetch-all-submodules:
-    $(PYTHON) tools/ci_fetch_deps.py all
-
-.PHONY: remove-all-submodules
-remove-all-submodules:
-    git submodule deinit -f --all
-    rm -rf .git/modules/*
-
-.PHONY: fetch-tags
-fetch-tags:
-    git fetch --tags --recurse-submodules=no --shallow-since="2023-02-01" https://github.com/adafruit/circuitpython HEAD
+.PHONY: fetch-submodules
+fetch-submodules:
+    # This update will fail because the commits we need aren't the latest on the
+    # branch. We can ignore that though because we fix it with the second command.
+    # (Only works for git servers that allow sha fetches.)
+    git submodule update --init -N --depth 1 || true
+    git submodule foreach 'git fetch --tags --depth 1 origin $$sha1 && git checkout -q $$sha1'
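The documentation and submodule targets shown here are run the same way locally. A sketch, assuming Sphinx and the documentation requirements are already installed:

    make fetch-all-submodules    # main branch; 7.2.x uses 'make fetch-submodules'
    make html                    # Sphinx HTML output under $(BUILDDIR)/html
    make check-translate         # regenerate circuitpython.pot and compare against locale/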
46
README.rst
46
README.rst
@@ -56,6 +56,10 @@ Specifically useful documentation when starting out:
 - `CircuitPython Essentials <https://learn.adafruit.com/circuitpython-essentials>`__
 - `Example Code <https://github.com/adafruit/Adafruit_Learning_System_Guides/tree/master/CircuitPython_Essentials>`__

+Code Search
+------------
+GitHub doesn't currently support code search on forks. Therefore, CircuitPython doesn't have code search through GitHub because it is a fork of MicroPython. Luckily, `SourceGraph <https://sourcegraph.com/github.com/adafruit/circuitpython>`_ has free code search for public repos like CircuitPython. So, visit `sourcegraph.com/github.com/adafruit/circuitpython <https://sourcegraph.com/github.com/adafruit/circuitpython>`_ to search the CircuitPython codebase online.
+
 Contributing
 ------------

@@ -80,19 +84,15 @@ common set of requirements.

 If you'd like to use the term "CircuitPython" and Blinka for your product here is what we ask:

-- Your product is supported by the primary
+* Your product is supported by the primary
   `"adafruit/circuitpython" <https://github.com/adafruit/circuitpython>`_ repo. This way we can
   update any custom code as we update the CircuitPython internals.
-- Your product is listed on `circuitpython.org <https://circuitpython.org>`__ (source
+* Your product is listed on `circuitpython.org <https://circuitpython.org>`__ (source
   `here <https://github.com/adafruit/circuitpython-org/>`_). This is to ensure that a user of your
   product can always download the latest version of CircuitPython from the standard place.
-- Your product supports at least one standard "`Workflow <https://docs.circuitpython.org/en/latest/docs/workflows.html>`__" for serial and file access:
-
-  - With a user accessible USB plug which appears as a CIRCUITPY drive when plugged in.
-  - With file and serial access over Bluetooth Low Energy using the BLE Workflow.
-  - With file access over WiFi using the WiFi Workflow with serial access over USB and/or WebSocket.
-
-- Boards that do not support the USB Workflow should be clearly marked.
+* Your product has a user accessible USB plug which appears as a CIRCUITPY drive when plugged in
+  AND/OR provides file and serial access over Bluetooth Low Energy. Boards that do not support USB
+  should be clearly marked as BLE-only CircuitPython.

 If you choose not to meet these requirements, then we ask you call your version of CircuitPython
 something else (for example, SuperDuperPython) and not use the Blinka logo. You can say it is
@@ -120,7 +120,7 @@ Behavior
   make each file independent from each other.

 - ``boot.py`` runs only once on start up before
-  workflows are initialized. This lays the ground work for configuring USB at
+  USB is initialized. This lays the ground work for configuring USB at
   startup rather than it being fixed. Since serial is not available,
   output is written to ``boot_out.txt``.
 - ``code.py`` (or ``main.py``) is run after every reload until it
@@ -129,33 +129,18 @@ Behavior
   ``code.py`` **in the REPL anymore, as the REPL is a fresh vm.** CircuitPython's goal for this
   change includes reducing confusion about pins and memory being used.
 - After the main code is finished the REPL can be entered by pressing any key.
-- If the file ``repl.py`` exists, it is executed before the REPL Prompt is shown
-  - In safe mode this functionality is disabled, to ensure the REPL Prompt can always be reached
 - Autoreload state will be maintained across reload.

 - Adds a safe mode that does not run user code after a hard crash or brown out. This makes it
   possible to fix code that causes nasty crashes by making it available through mass storage after
   the crash. A reset (the button) is needed after it's fixed to get back into normal mode.
-- Safe mode may be handled programmatically by providing a ``safemode.py``.
-  ``safemode.py`` is run if the board has reset due to entering safe mode, unless the safe mode
-  initiated by the user by pressing button(s).
-  USB is not available so nothing can be printed.
-  ``safemode.py`` can determine why the safe mode occurred
-  using ``supervisor.runtime.safe_mode_reason``, and take appropriate action. For instance,
-  if a hard crash occurred, ``safemode.py`` may do a ``microcontroller.reset()``
-  to automatically restart despite the crash.
-  If the battery is low, but is being charged, ``safemode.py`` may put the board in deep sleep
-  for a while. Or it may simply reset, and have ``code.py`` check the voltage and do the sleep.
 - RGB status LED indicating CircuitPython state.
-  - One green flash - code completed without error.
-  - Two red flashes - code ended due to an exception.
-  - Three yellow flashes - safe mode. May be due to CircuitPython internal error.
-- Re-runs ``code.py`` or other main file after file system writes by a workflow. (Disable with
+- Re-runs ``code.py`` or other main file after file system writes over USB mass storage. (Disable with
   ``supervisor.disable_autoreload()``)
 - Autoreload is disabled while the REPL is active.
-- ``code.py`` may also be named ``code.txt``, ``main.py``, or ``main.txt``.
-- ``boot.py`` may also be named ``boot.txt``.
-- ``safemode.py`` may also be named ``safemode.txt``.
+- Main is one of these: ``code.txt``, ``code.py``, ``main.py``,
+  ``main.txt``
+- Boot is one of these: ``boot.py``, ``boot.txt``

 API
 ~~~
@@ -228,12 +213,11 @@ Supported Support status
 ================ ============================================================
 atmel-samd       ``SAMD21`` stable | ``SAMD51`` stable
 cxd56            stable
-espressif        ``ESP32`` beta | ``ESP32-C3`` beta | ``ESP32-S2`` stable | ``ESP32-S3`` beta
+espressif        ``ESP32-C3`` beta | ``ESP32-S2`` stable | ``ESP32-S3`` beta
 litex            alpha
 mimxrt10xx       alpha
 nrf              stable
 raspberrypi      stable
-silabs (efr32)   alpha
 stm              ``F4`` stable | ``others`` beta
 unix             alpha
 ================ ============================================================
conf.py (76 changed lines)
@@ -30,7 +30,6 @@ from collections import defaultdict
 from sphinx.transforms import SphinxTransform
 from docutils import nodes
 from sphinx import addnodes
-from sphinx.ext import intersphinx

 tools_describe = str(pathlib.Path(__file__).parent / "tools/describe")

@@ -53,14 +52,10 @@ subprocess.check_output(["make", "stubs"])
 #modules_support_matrix = shared_bindings_matrix.support_matrix_excluded_boards()
 modules_support_matrix = shared_bindings_matrix.support_matrix_by_board()
 modules_support_matrix_reverse = defaultdict(list)
-for board, matrix_info in modules_support_matrix.items():
-    for module in matrix_info["modules"]:
+for board, modules in modules_support_matrix.items():
+    for module in modules:
         modules_support_matrix_reverse[module].append(board)
-modules_support_matrix_reverse = dict(
-    (module, sorted(boards))
-    for module, boards in modules_support_matrix_reverse.items()
-)
+modules_support_matrix_reverse = dict((module, sorted(boards)) for module, boards in modules_support_matrix_reverse.items())

 html_context = {
     'support_matrix': modules_support_matrix,
@@ -78,7 +73,6 @@ needs_sphinx = '1.3'
 extensions = [
     'sphinx.ext.autodoc',
     'sphinx.ext.doctest',
-    "sphinxcontrib.jquery",
     'sphinxcontrib.rsvgconverter',
     'sphinx.ext.intersphinx',
     'sphinx.ext.todo',
@@ -89,7 +83,7 @@ extensions = [
 ]

 # Add any paths that contain templates here, relative to this directory.
-templates_path = ['templates', "docs/templates"]
+templates_path = ['templates']

 # The suffix of source filenames.
 source_suffix = {
@@ -173,7 +167,6 @@ exclude_patterns = ["**/build*",
                     ".env",
                     ".venv",
                     ".direnv",
-                    ".devcontainer/Readme.md",
                     "data",
                     "docs/autoapi",
                     "docs/README.md",
@@ -202,8 +195,6 @@ exclude_patterns = ["**/build*",
                     "ports/cxd56/spresense-exported-sdk",
                     "ports/espressif/certificates",
                     "ports/espressif/esp-idf",
-                    "ports/espressif/esp-camera",
-                    "ports/espressif/esp-protocols",
                     "ports/espressif/.idf_tools",
                     "ports/espressif/peripherals",
                     "ports/litex/hw",
@@ -217,13 +208,11 @@ exclude_patterns = ["**/build*",
                     "ports/nrf/peripherals",
                     "ports/nrf/usb",
                     "ports/raspberrypi/sdk",
-                    "ports/raspberrypi/lib",
-                    "ports/silabs/gecko_sdk",
-                    "ports/silabs/tools",
                     "ports/stm/st_driver",
                     "ports/stm/packages",
                     "ports/stm/peripherals",
                     "ports/stm/ref",
+                    "ports/unix",
                     "py",
                     "shared/*",
                     "shared-bindings/util.*",
@@ -267,9 +256,19 @@ rst_epilog = """

 # -- Options for HTML output ----------------------------------------------

-import sphinx_rtd_theme
-html_theme = 'sphinx_rtd_theme'
-html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), '.']
+# on_rtd is whether we are on readthedocs.org
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+if not on_rtd:  # only import and set the theme if we're building docs locally
+    try:
+        import sphinx_rtd_theme
+        html_theme = 'sphinx_rtd_theme'
+        html_theme_path = [sphinx_rtd_theme.get_html_theme_path(), '.']
+    except:
+        html_theme = 'default'
+        html_theme_path = ['.']
+else:
+    html_theme_path = ['.']

 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
@@ -362,23 +361,15 @@ latex_elements = {
     # Additional stuff for the LaTeX preamble.
     #'preamble': '',
     # Include 3 levels of headers in PDF ToC
-    'preamble': r'''
-\setcounter{tocdepth}{2}
-\hbadness=99999
-\hfuzz=20pt
-\usepackage{pdflscape}
-''',
+    'preamble': '\setcounter{tocdepth}{2}',
 }

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    ("docs/pdf", 'CircuitPython.tex', 'CircuitPython Documentation',
+    (master_doc, 'CircuitPython.tex', 'CircuitPython Documentation',
      'CircuitPython Contributors', 'manual'),
-    # Uncomment this if you want to build a PDF of the board -> module support matrix.
-    # ("shared-bindings/support_matrix", 'SupportMatrix.tex', 'Board Support Matrix',
-    #  'CircuitPython Contributors', 'manual'),
 ]

 # The name of an image file (relative to this directory) to place at the top of
@@ -440,10 +431,8 @@ texinfo_documents = [


 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {"python": ('https://docs.python.org/3/', None),
-                       "register": ('https://circuitpython.readthedocs.io/projects/register/en/latest/', None),
-                       "mcp2515": ('https://circuitpython.readthedocs.io/projects/mcp2515/en/latest/', None),
-                       "typing": ('https://circuitpython.readthedocs.io/projects/adafruit-circuitpython-typing/en/latest/', None)}
+intersphinx_mapping = {"cpython": ('https://docs.python.org/3/', None),
+                       "register": ('https://circuitpython.readthedocs.io/projects/register/en/latest/', None)}

 # Adapted from sphinxcontrib-redirects
 from sphinx.builders import html as builders
@@ -485,26 +474,6 @@ def generate_redirects(app):
         with open(redirected_filename, 'w') as f:
             f.write(TEMPLATE % urllib.parse.quote(to_path, '#/'))

-def adafruit_typing_workaround(app, env, node, contnode):
-    # Sphinx marks a requesting node that uses circuitpython-typing
-    # as looking for a "class" definition, but unfortunately
-    # Sphinx doesn't recognize TypeAlias based types usefully from
-    # the typing library.
-    # (see: https://github.com/sphinx-doc/sphinx/issues/8934)
-    # Instead, it categorizes these types as "data".
-    # (see: python -m sphinx.ext.intersphinx \
-    #  https://docs.circuitpython.org/projects/adafruit-circuitpython-typing/en/latest/objects.inv)
-    # This workaround traps missing references, checks if
-    # they are likely to be in the circuitpython_typing package,
-    # and changes the requesting type from "class" to "data" if
-    # needed, and re-tries the reference resolver.
-    ref = node.get("reftarget", None)
-    if ref and ref.startswith("circuitpython_typing."):
-        dtype = node.get("reftype", None)
-        if dtype != "data":
-            node.attributes.update({"reftype": "data"})
-        return intersphinx.missing_reference(app, env, node, contnode)
-

 class CoreModuleTransform(SphinxTransform):
     default_priority = 870
@@ -541,5 +510,4 @@ def setup(app):
     app.add_js_file("filter.js")
     app.add_config_value('redirects_file', 'redirects', 'env')
     app.connect('builder-inited', generate_redirects)
-    app.connect('missing-reference', adafruit_typing_workaround)
     app.add_transform(CoreModuleTransform)
@@ -1 +1 @@
-Subproject commit d17b999f46fd148ac192ad692b8a4639f81add38
+Subproject commit 266ea20ed80104c315dcb124b482fa5f9f48cdec
@@ -49,10 +49,6 @@
 #include "shared-bindings/_bleio/ScanEntry.h"
 #include "shared-bindings/time/__init__.h"

-#if CIRCUITPY_OS_GETENV
-#include "shared-bindings/os/__init__.h"
-#endif
-
 #define MSEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000) / (RESOLUTION))
 #define SEC_TO_UNITS(TIME, RESOLUTION) (((TIME) * 1000000) / (RESOLUTION))
 #define UNITS_TO_SEC(TIME, RESOLUTION) (((TIME)*(RESOLUTION)) / 1000000)
@@ -82,23 +78,28 @@ STATIC void add_generic_services(bleio_adapter_obj_t *adapter) {

     // Generic Access Service setup.

-    bleio_uuid_obj_t *generic_access_service_uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+    bleio_uuid_obj_t *generic_access_service_uuid = m_new_obj(bleio_uuid_obj_t);
+    generic_access_service_uuid->base.type = &bleio_uuid_type;
     common_hal_bleio_uuid_construct(generic_access_service_uuid, 0x1800, NULL);

-    bleio_uuid_obj_t *device_name_characteristic_uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+    bleio_uuid_obj_t *device_name_characteristic_uuid = m_new_obj(bleio_uuid_obj_t);
+    device_name_characteristic_uuid->base.type = &bleio_uuid_type;
     common_hal_bleio_uuid_construct(device_name_characteristic_uuid, 0x2A00, NULL);

-    bleio_uuid_obj_t *appearance_characteristic_uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+    bleio_uuid_obj_t *appearance_characteristic_uuid = m_new_obj(bleio_uuid_obj_t);
+    appearance_characteristic_uuid->base.type = &bleio_uuid_type;
     common_hal_bleio_uuid_construct(appearance_characteristic_uuid, 0x2A01, NULL);

     // Not implemented:
     // Peripheral Preferred Connection Parameters
     // Central Address Resolution

-    bleio_service_obj_t *generic_access_service = mp_obj_malloc(bleio_service_obj_t, &bleio_service_type);
+    bleio_service_obj_t *generic_access_service = m_new_obj(bleio_service_obj_t);
+    generic_access_service->base.type = &bleio_service_type;
     common_hal_bleio_service_construct(generic_access_service, generic_access_service_uuid, false);

-    adapter->device_name_characteristic = mp_obj_malloc(bleio_characteristic_obj_t, &bleio_characteristic_type);
+    adapter->device_name_characteristic = m_new_obj(bleio_characteristic_obj_t);
+    adapter->device_name_characteristic->base.type = &bleio_characteristic_type;

     char generic_name[] = { 'C', 'I', 'R', 'C', 'U', 'I', 'T', 'P', 'Y', 'n', 'n', 'n', 'n' };
     mp_buffer_info_t generic_name_bufinfo = {
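The object-allocation change that repeats throughout this hunk (and the rest of this file) is easier to see in isolation. A minimal sketch of the two idioms, using only the bleio_uuid_obj_t type and bleio_uuid_type that already appear above; this is an illustration, not part of the diff:

    // 7.2.x idiom: allocate the object, then fill in the MicroPython type tag by hand.
    bleio_uuid_obj_t *uuid = m_new_obj(bleio_uuid_obj_t);
    uuid->base.type = &bleio_uuid_type;

    // main idiom: mp_obj_malloc() takes the type pointer and sets base.type as part
    // of the allocation, so the two-line pattern above collapses into one call.
    bleio_uuid_obj_t *uuid2 = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);

Either way the object is then passed to common_hal_bleio_uuid_construct() exactly as shown in the hunk.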
@@ -127,7 +128,8 @@ STATIC void add_generic_services(bleio_adapter_obj_t *adapter) {
         .len = sizeof(zero_16),
     };

-    adapter->appearance_characteristic = mp_obj_malloc(bleio_characteristic_obj_t, &bleio_characteristic_type);
+    adapter->appearance_characteristic = m_new_obj(bleio_characteristic_obj_t);
+    adapter->appearance_characteristic->base.type = &bleio_characteristic_type;

     common_hal_bleio_characteristic_construct(
         adapter->appearance_characteristic,
@@ -145,16 +147,20 @@ STATIC void add_generic_services(bleio_adapter_obj_t *adapter) {

     // Generic Attribute Service setup.

-    bleio_uuid_obj_t *generic_attribute_service_uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+    bleio_uuid_obj_t *generic_attribute_service_uuid = m_new_obj(bleio_uuid_obj_t);
+    generic_attribute_service_uuid->base.type = &bleio_uuid_type;
     common_hal_bleio_uuid_construct(generic_attribute_service_uuid, 0x1801, NULL);

-    bleio_uuid_obj_t *service_changed_characteristic_uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+    bleio_uuid_obj_t *service_changed_characteristic_uuid = m_new_obj(bleio_uuid_obj_t);
+    service_changed_characteristic_uuid->base.type = &bleio_uuid_type;
     common_hal_bleio_uuid_construct(service_changed_characteristic_uuid, 0x2A05, NULL);

-    bleio_service_obj_t *generic_attribute_service = mp_obj_malloc(bleio_service_obj_t, &bleio_service_type);
+    bleio_service_obj_t *generic_attribute_service = m_new_obj(bleio_service_obj_t);
+    generic_attribute_service->base.type = &bleio_service_type;
     common_hal_bleio_service_construct(generic_attribute_service, generic_attribute_service_uuid, false);

-    adapter->service_changed_characteristic = mp_obj_malloc(bleio_characteristic_obj_t, &bleio_characteristic_type);
+    adapter->service_changed_characteristic = m_new_obj(bleio_characteristic_obj_t);
+    adapter->service_changed_characteristic->base.type = &bleio_characteristic_type;

     uint32_t zero_32 = 0;
     mp_buffer_info_t zero_32_value = {
@@ -180,7 +186,7 @@ STATIC void add_generic_services(bleio_adapter_obj_t *adapter) {

 STATIC void check_enabled(bleio_adapter_obj_t *adapter) {
     if (!common_hal_bleio_adapter_get_enabled(adapter)) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Adapter not enabled"));
+        mp_raise_bleio_BluetoothError(translate("Adapter not enabled"));
     }
 }

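The other change repeated in every error path below is the wrapper around error strings. A hedged, stand-alone sketch of the before and after, reusing the "Adapter not enabled" message from the hunk above (illustration only, not part of the diff):

    // 7.2.x: error strings go through translate(), the wrapper used for
    // CircuitPython's translated messages.
    mp_raise_bleio_BluetoothError(translate("Adapter not enabled"));

    // main: the same strings are wrapped in MP_ERROR_TEXT(), MicroPython's
    // compressed-error-text macro; the surrounding raise call is unchanged.
    mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Adapter not enabled"));

Every MP_ERROR_TEXT/translate pair in the remaining hunks follows this exact shape.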
@@ -272,43 +278,33 @@ char default_ble_name[] = { 'C', 'I', 'R', 'C', 'U', 'I', 'T', 'P', 'Y', 0, 0, 0
 // Get various values and limits set by the adapter.
 // Set event mask.
 STATIC void bleio_adapter_hci_init(bleio_adapter_obj_t *self) {
-    mp_int_t name_len = 0;
-
-    #if CIRCUITPY_OS_GETENV
-    mp_obj_t name = common_hal_os_getenv("CIRCUITPY_BLE_NAME", mp_const_none);
-    if (name != mp_const_none) {
-        mp_arg_validate_type_string(name, MP_QSTR_CIRCUITPY_BLE_NAME);
-        self->name = name;
-    }
-    #endif
-
-    if (!self->name) {
-        name_len = sizeof(default_ble_name);
+    const size_t len = sizeof(default_ble_name);
+
     bt_addr_t addr;
     hci_check_error(hci_read_bd_addr(&addr));

-    default_ble_name[name_len - 4] = nibble_to_hex_lower[addr.val[1] >> 4 & 0xf];
-    default_ble_name[name_len - 3] = nibble_to_hex_lower[addr.val[1] & 0xf];
-    default_ble_name[name_len - 2] = nibble_to_hex_lower[addr.val[0] >> 4 & 0xf];
-    default_ble_name[name_len - 1] = nibble_to_hex_lower[addr.val[0] & 0xf];
-    self->name = mp_obj_new_str(default_ble_name, (uint8_t)name_len);
-    }
+    default_ble_name[len - 4] = nibble_to_hex_lower[addr.val[1] >> 4 & 0xf];
+    default_ble_name[len - 3] = nibble_to_hex_lower[addr.val[1] & 0xf];
+    default_ble_name[len - 2] = nibble_to_hex_lower[addr.val[0] >> 4 & 0xf];
+    default_ble_name[len - 1] = nibble_to_hex_lower[addr.val[0] & 0xf];
+    self->name = mp_obj_new_str(default_ble_name, len);

     // Get version information.
     if (hci_read_local_version(&self->hci_version, &self->hci_revision, &self->lmp_version,
         &self->manufacturer, &self->lmp_subversion) != HCI_OK) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not read HCI version"));
+        mp_raise_bleio_BluetoothError(translate("Could not read HCI version"));
     }
     // Get supported features.
     if (hci_le_read_local_supported_features(self->features) != HCI_OK) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not read BLE features"));
+        mp_raise_bleio_BluetoothError(translate("Could not read BLE features"));
     }

     // Enabled desired events.
     // Most importantly, includes:
     // BT_EVT_MASK_LE_META_EVENT BT_EVT_BIT(61)
     if (hci_set_event_mask(0x3FFFFFFFFFFFFFFF) != HCI_OK) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not set event mask"));
+        mp_raise_bleio_BluetoothError(translate("Could not set event mask"));
     }
     // The default events for LE are:
     // BT_EVT_MASK_LE_CONN_COMPLETE, BT_EVT_MASK_LE_ADVERTISING_REPORT,
@@ -329,7 +325,7 @@ STATIC void bleio_adapter_hci_init(bleio_adapter_obj_t *self) {
     uint16_t acl_max_num;
     uint16_t sco_max_num;
     if (hci_read_buffer_size(&acl_max_len, &sco_max_len, &acl_max_num, &sco_max_num) != HCI_OK) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not read BLE buffer info"));
+        mp_raise_bleio_BluetoothError(translate("Could not read BLE buffer info"));
     }
     self->max_acl_buffer_len = acl_max_len;
     self->max_acl_num_buffers = acl_max_num;
@@ -339,7 +335,7 @@ STATIC void bleio_adapter_hci_init(bleio_adapter_obj_t *self) {
     if (BT_FEAT_LE_EXT_ADV(self->features)) {
         uint16_t max_adv_data_len;
         if (hci_le_read_maximum_advertising_data_length(&max_adv_data_len) != HCI_OK) {
-            mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not get max advertising length"));
+            mp_raise_bleio_BluetoothError(translate("Could not get max advertising length"));
         }
         self->max_adv_data_len = max_adv_data_len;
     } else {
@@ -406,7 +402,8 @@ bleio_address_obj_t *common_hal_bleio_adapter_get_address(bleio_adapter_obj_t *s
     bt_addr_t addr;
     hci_check_error(hci_read_bd_addr(&addr));

-    bleio_address_obj_t *address = mp_obj_malloc(bleio_address_obj_t, &bleio_address_type);
+    bleio_address_obj_t *address = m_new_obj(bleio_address_obj_t);
+    address->base.type = &bleio_address_type;

     common_hal_bleio_address_construct(address, addr.val, BT_ADDR_LE_PUBLIC);
     return address;
@@ -472,14 +469,14 @@ mp_obj_t common_hal_bleio_adapter_start_scan(bleio_adapter_obj_t *self, uint8_t

     if (self->scan_results != NULL) {
         if (!shared_module_bleio_scanresults_get_done(self->scan_results)) {
-            mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Scan already in progress. Stop with stop_scan."));
+            mp_raise_bleio_BluetoothError(translate("Scan already in progess. Stop with stop_scan."));
         }
         self->scan_results = NULL;
     }
     self->scan_results = shared_module_bleio_new_scanresults(buffer_size, prefixes, prefix_length, minimum_rssi);

     // size_t max_packet_size = extended ? BLE_GAP_SCAN_BUFFER_EXTENDED_MAX_SUPPORTED : BLE_GAP_SCAN_BUFFER_MAX;
-    // uint8_t *raw_data = m_malloc(sizeof(ble_data_t) + max_packet_size);
+    // uint8_t *raw_data = m_malloc(sizeof(ble_data_t) + max_packet_size, false);
     // ble_data_t * sd_data = (ble_data_t *) raw_data;
     // self->scan_results->common_hal_data = sd_data;
     // sd_data->len = max_packet_size;
@@ -601,7 +598,7 @@ mp_obj_t common_hal_bleio_adapter_connect(bleio_adapter_obj_t *self, bleio_addre

     // uint16_t conn_handle = event_info.conn_handle;
     // if (conn_handle == BLE_CONN_HANDLE_INVALID) {
-    //     mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Failed to connect: timeout"));
+    //     mp_raise_bleio_BluetoothError(translate("Failed to connect: timeout"));
     // }

     // // Negotiate for better PHY, larger MTU and data lengths since we are the central. These are
@@ -622,14 +619,14 @@ mp_obj_t common_hal_bleio_adapter_connect(bleio_adapter_obj_t *self, bleio_addre
     // }
     // }

-    mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Failed to connect: internal error"));
+    mp_raise_bleio_BluetoothError(translate("Failed to connect: internal error"));

     return mp_const_none;
 }

 STATIC void check_data_fit(size_t data_len, bool connectable) {
     if (data_len > MAX_ADVERTISEMENT_SIZE) {
-        mp_raise_ValueError(MP_ERROR_TEXT("Data too large for advertisement packet"));
+        mp_raise_ValueError(translate("Data too large for advertisement packet"));
     }
 }

@@ -686,7 +683,7 @@ uint32_t _common_hal_bleio_adapter_start_advertising(bleio_adapter_obj_t *self,

     if (extended) {
         if (!BT_FEAT_LE_EXT_ADV(self->features)) {
-            mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Data length needs extended advertising, but this adapter does not support it"));
+            mp_raise_bleio_BluetoothError(translate("Data length needs extended advertising, but this adapter does not support it"));
         }

         uint16_t props = 0;
@@ -801,7 +798,7 @@ void common_hal_bleio_adapter_start_advertising(bleio_adapter_obj_t *self,
     check_data_fit(scan_response_data_bufinfo->len, connectable);

     if (advertising_data_bufinfo->len > MAX_ADVERTISEMENT_SIZE && scan_response_data_bufinfo->len > 0) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Extended advertisements with scan response not supported."));
+        mp_raise_bleio_BluetoothError(translate("Extended advertisements with scan response not supported."));
     }

     // Anonymous mode requires a timeout so that we don't continue to broadcast
@@ -811,13 +808,13 @@ void common_hal_bleio_adapter_start_advertising(bleio_adapter_obj_t *self,
         timeout = MAX_ANONYMOUS_ADV_TIMEOUT_SECS;
     } else {
         if (timeout > MAX_LIMITED_DISCOVERABLE_ADV_TIMEOUT_SECS) {
-            mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Timeout is too long: Maximum timeout length is %d seconds"),
+            mp_raise_bleio_BluetoothError(translate("Timeout is too long: Maximum timeout length is %d seconds"),
                 MAX_LIMITED_DISCOVERABLE_ADV_TIMEOUT_SECS);
         }
     }

     if (tx_power != 0) {
-        mp_raise_NotImplementedError(MP_ERROR_TEXT("Only tx_power=0 supported"));
+        mp_raise_NotImplementedError(translate("Only tx_power=0 supported"));
     }

     const uint32_t result = _common_hal_bleio_adapter_start_advertising(
@@ -829,7 +826,7 @@ void common_hal_bleio_adapter_start_advertising(bleio_adapter_obj_t *self,
         tx_power, directed_to);

     if (result) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Already advertising"));
+        mp_raise_bleio_BluetoothError(translate("Already advertising"));
     }
     self->circuitpython_advertising = false;
 }
@@ -65,7 +65,7 @@ typedef struct _bleio_adapter_obj_t {
     uint16_t manufacturer;
     uint16_t lmp_subversion;

-    // Used to monitor advertising timeout for legacy advertising.
+    // Used to monitor advertising timeout for legacy avertising.
     uint64_t advertising_start_ticks;
     uint64_t advertising_timeout_msecs; // If zero, do not check.

@@ -45,5 +45,5 @@ bleio_uuid_obj_t *bleio_attribute_get_uuid(mp_obj_t *attribute) {
         bleio_service_obj_t *service = MP_OBJ_TO_PTR(attribute);
         return service->uuid;
     }
-    mp_raise_RuntimeError(MP_ERROR_TEXT("Invalid BLE attribute"));
+    mp_raise_RuntimeError(translate("Invalid BLE attribute"));
 }
@@ -57,9 +57,9 @@ void common_hal_bleio_characteristic_construct(bleio_characteristic_obj_t *self,
     self->value = mp_obj_new_bytes(initial_value_bufinfo->buf, initial_value_bufinfo->len);

     const mp_int_t max_length_max = 512;
-    mp_arg_validate_int_range(max_length, 0, max_length_max, MP_QSTR_max_length);
+    if (max_length < 0 || max_length > max_length_max) {
+        mp_raise_ValueError(translate("max_length must be <= 512"));
+    }
     self->max_length = max_length;
     self->fixed_length = fixed_length;

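The validation change in this hunk swaps a hand-rolled range check for a helper. A hedged sketch of the two forms, using only the names visible above (illustration, not part of the diff):

    // 7.2.x: open-coded bounds check plus a hand-written error message.
    if (max_length < 0 || max_length > max_length_max) {
        mp_raise_ValueError(translate("max_length must be <= 512"));
    }

    // main: mp_arg_validate_int_range() does the same bounds check and raises
    // ValueError itself, naming the offending argument through its qstr.
    mp_arg_validate_int_range(max_length, 0, max_length_max, MP_QSTR_max_length);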
@@ -108,10 +108,10 @@ size_t common_hal_bleio_characteristic_get_value(bleio_characteristic_obj_t *sel

 void common_hal_bleio_characteristic_set_value(bleio_characteristic_obj_t *self, mp_buffer_info_t *bufinfo) {
     if (self->fixed_length && bufinfo->len != self->max_length) {
-        mp_raise_ValueError(MP_ERROR_TEXT("Value length != required fixed length"));
+        mp_raise_ValueError(translate("Value length != required fixed length"));
     }
     if (bufinfo->len > self->max_length) {
-        mp_raise_ValueError(MP_ERROR_TEXT("Value length > max_length"));
+        mp_raise_ValueError(translate("Value length > max_length"));
     }

     // Do GATT operations only if this characteristic has been added to a registered service.
@@ -125,7 +125,7 @@ void common_hal_bleio_characteristic_set_value(bleio_characteristic_obj_t *self,
         } else if (self->props & CHAR_PROP_WRITE_NO_RESPONSE) {
             // att_write_cmd(conn_handle, self->handle, bufinfo->buff, bufinfo->len);
         } else {
-            mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Characteristic not writable"));
+            mp_raise_bleio_BluetoothError(translate("Characteristic not writable"));
         }
     } else {
         // Always write the value locally even if no connections are active.
@@ -168,7 +168,7 @@ bleio_characteristic_properties_t common_hal_bleio_characteristic_get_properties
 void common_hal_bleio_characteristic_add_descriptor(bleio_characteristic_obj_t *self, bleio_descriptor_obj_t *descriptor) {
     if (self->handle != common_hal_bleio_adapter_obj.last_added_characteristic_handle) {
         mp_raise_bleio_BluetoothError(
-            MP_ERROR_TEXT("Descriptor can only be added to most recently added characteristic"));
+            translate("Descriptor can only be added to most recently added characteristic"));
     }

     descriptor->handle = bleio_adapter_add_attribute(&common_hal_bleio_adapter_obj, MP_OBJ_TO_PTR(descriptor));
@@ -181,11 +181,11 @@ void common_hal_bleio_characteristic_add_descriptor(bleio_characteristic_obj_t *

 void common_hal_bleio_characteristic_set_cccd(bleio_characteristic_obj_t *self, bool notify, bool indicate) {
     if (self->cccd == NULL) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("No CCCD for this Characteristic"));
+        mp_raise_bleio_BluetoothError(translate("No CCCD for this Characteristic"));
     }

     if (!common_hal_bleio_service_get_is_remote(self->service)) {
-        mp_raise_bleio_RoleError(MP_ERROR_TEXT("Can't set CCCD on local Characteristic"));
+        mp_raise_bleio_RoleError(translate("Can't set CCCD on local Characteristic"));
     }

     const uint16_t conn_handle = bleio_connection_get_conn_handle(self->service->connection);
@@ -199,7 +199,7 @@ void common_hal_bleio_characteristic_set_cccd(bleio_characteristic_obj_t *self,
     (void)cccd_value;
     // uint8_t rsp[sizeof(bt_att_error_rsp)];
     // if (att_write_req(conn_handle, self->cccd->handle, &cccd_value, sizeof(cccd_value)) == 0) {
-    //     mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Could not write CCCD"));
+    //     mp_raise_bleio_BluetoothError(translate("Could not write CCCD"));
     // }
 }

@@ -55,7 +55,8 @@ void common_hal_bleio_characteristic_buffer_construct(bleio_characteristic_buffe
     self->characteristic = characteristic;
     self->timeout_ms = timeout * 1000;
     // This is a macro.
-    ringbuf_alloc(&self->ringbuf, buffer_size);
+    // true means long-lived, so it won't be moved.
+    ringbuf_alloc(&self->ringbuf, buffer_size, true);

     bleio_characteristic_set_observer(characteristic, self);
 }
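The extra argument on the 7.2.x side of this hunk is the long-lived flag that main's ringbuf_alloc() no longer takes. A hedged sketch of the two call shapes, with everything except the calls themselves assumed from the surrounding code:

    // 7.2.x: the third argument marks the allocation as long-lived so it won't
    // be moved (per the comment carried in the original code).
    ringbuf_alloc(&self->ringbuf, buffer_size, true);

    // main: the flag is gone; instead the buffer is explicitly released in the
    // deinit path with ringbuf_deinit() (see the next hunk).
    ringbuf_alloc(&self->ringbuf, buffer_size);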
@@ -92,7 +93,6 @@ bool common_hal_bleio_characteristic_buffer_deinited(bleio_characteristic_buffer
 void common_hal_bleio_characteristic_buffer_deinit(bleio_characteristic_buffer_obj_t *self) {
     if (!common_hal_bleio_characteristic_buffer_deinited(self)) {
         bleio_characteristic_clear_observer(self->characteristic);
-        ringbuf_deinit(&self->ringbuf);
     }
 }

@@ -453,7 +453,8 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
 //     for (size_t i = 0; i < response->count; ++i) {
 //         ble_gattc_service_t *gattc_service = &response->services[i];

-//         bleio_service_obj_t *service = mp_obj_malloc(bleio_service_obj_t, &bleio_service_type);
+//         bleio_service_obj_t *service = m_new_obj(bleio_service_obj_t);
+//         service->base.type = &bleio_service_type;

 //         // Initialize several fields at once.
 //         bleio_service_from_connection(service, bleio_connection_new_from_internal(connection));
@@ -465,7 +466,8 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern

 //         if (gattc_service->uuid.type != BLE_UUID_TYPE_UNKNOWN) {
 //             // Known service UUID.
-//             bleio_uuid_obj_t *uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+//             bleio_uuid_obj_t *uuid = m_new_obj(bleio_uuid_obj_t);
+//             uuid->base.type = &bleio_uuid_type;
 //             bleio_uuid_construct_from_nrf_ble_uuid(uuid, &gattc_service->uuid);
 //             service->uuid = uuid;
 //         } else {
@@ -489,14 +491,15 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
 //     for (size_t i = 0; i < response->count; ++i) {
 //         ble_gattc_char_t *gattc_char = &response->chars[i];

-//         bleio_characteristic_obj_t *characteristic =
-//             mp_obj_malloc(bleio_characteristic_obj_t, &bleio_characteristic_type);
+//         bleio_characteristic_obj_t *characteristic = m_new_obj(bleio_characteristic_obj_t);
+//         characteristic->base.type = &bleio_characteristic_type;

 //         bleio_uuid_obj_t *uuid = NULL;

 //         if (gattc_char->uuid.type != BLE_UUID_TYPE_UNKNOWN) {
 //             // Known characteristic UUID.
-//             uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+//             uuid = m_new_obj(bleio_uuid_obj_t);
+//             uuid->base.type = &bleio_uuid_type;
 //             bleio_uuid_construct_from_nrf_ble_uuid(uuid, &gattc_char->uuid);
 //         } else {
 //             // The discovery response contained a 128-bit UUID that has not yet been registered with the
@@ -512,7 +515,7 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
 //             (gattc_char->char_props.write ? CHAR_PROP_WRITE : 0) |
 //             (gattc_char->char_props.write_wo_resp ? CHAR_PROP_WRITE_NO_RESPONSE : 0);

-//         // Call common_hal_bleio_characteristic_construct() to initialize some fields and set up evt handler.
+//         // Call common_hal_bleio_characteristic_construct() to initalize some fields and set up evt handler.
 //         common_hal_bleio_characteristic_construct(
 //             characteristic, m_char_discovery_service, gattc_char->handle_value, uuid,
 //             props, SECURITY_MODE_OPEN, SECURITY_MODE_OPEN,
@@ -554,13 +557,15 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
 //             break;
 //         }

-//         bleio_descriptor_obj_t *descriptor = mp_obj_malloc(bleio_descriptor_obj_t, &bleio_descriptor_type);
+//         bleio_descriptor_obj_t *descriptor = m_new_obj(bleio_descriptor_obj_t);
+//         descriptor->base.type = &bleio_descriptor_type;

 //         bleio_uuid_obj_t *uuid = NULL;

 //         if (gattc_desc->uuid.type != BLE_UUID_TYPE_UNKNOWN) {
 //             // Known descriptor UUID.
-//             uuid = mp_obj_malloc(bleio_uuid_obj_t, &bleio_uuid_type);
+//             uuid = m_new_obj(bleio_uuid_obj_t);
+//             uuid->base.type = &bleio_uuid_type;
 //             bleio_uuid_construct_from_nrf_ble_uuid(uuid, &gattc_desc->uuid);
 //         } else {
 //             // The discovery response contained a 128-bit UUID that has not yet been registered with the
@@ -640,7 +645,7 @@ void common_hal_bleio_connection_set_connection_interval(bleio_connection_intern
 //     mp_obj_t uuid_obj;
 //     while ((uuid_obj = mp_iternext(iterable)) != MP_OBJ_STOP_ITERATION) {
 //         if (!mp_obj_is_type(uuid_obj, &bleio_uuid_type)) {
-//             mp_raise_TypeError(MP_ERROR_TEXT("non-UUID found in service_uuids_whitelist"));
+//             mp_raise_TypeError(translate("non-UUID found in service_uuids_whitelist"));
 //         }
 //         bleio_uuid_obj_t *uuid = MP_OBJ_TO_PTR(uuid_obj);

@@ -745,7 +750,8 @@ mp_obj_t bleio_connection_new_from_internal(bleio_connection_internal_t *interna
     if (internal->connection_obj != mp_const_none) {
         return internal->connection_obj;
     }
-    bleio_connection_obj_t *connection = mp_obj_malloc(bleio_connection_obj_t, &bleio_connection_type);
+    bleio_connection_obj_t *connection = m_new_obj(bleio_connection_obj_t);
+    connection->base.type = &bleio_connection_type;
     connection->connection = internal;
     internal->connection_obj = connection;

@@ -43,7 +43,7 @@ void common_hal_bleio_descriptor_construct(bleio_descriptor_obj_t *self, bleio_c

     const mp_int_t max_length_max = fixed_length ? BLE_GATTS_FIX_ATTR_LEN_MAX : BLE_GATTS_VAR_ATTR_LEN_MAX;
     if (max_length < 0 || max_length > max_length_max) {
-        mp_raise_ValueError_varg(MP_ERROR_TEXT("max_length must be 0-%d when fixed_length is %s"),
+        mp_raise_ValueError_varg(translate("max_length must be 0-%d when fixed_length is %s"),
             max_length_max, fixed_length ? "True" : "False");
     }
     self->max_length = max_length;
@@ -85,10 +85,10 @@ size_t common_hal_bleio_descriptor_get_value(bleio_descriptor_obj_t *self, uint8

 void common_hal_bleio_descriptor_set_value(bleio_descriptor_obj_t *self, mp_buffer_info_t *bufinfo) {
     if (self->fixed_length && bufinfo->len != self->max_length) {
-        mp_raise_ValueError(MP_ERROR_TEXT("Value length != required fixed length"));
+        mp_raise_ValueError(translate("Value length != required fixed length"));
     }
     if (bufinfo->len > self->max_length) {
-        mp_raise_ValueError(MP_ERROR_TEXT("Value length > max_length"));
+        mp_raise_ValueError(translate("Value length > max_length"));
     }

     self->value = mp_obj_new_bytes(bufinfo->buf, bufinfo->len);
@@ -37,13 +37,13 @@
 #include "supervisor/shared/tick.h"

 STATIC void write_to_ringbuf(bleio_packet_buffer_obj_t *self, uint8_t *data, uint16_t len) {
-    if (len + sizeof(uint16_t) > ringbuf_size(&self->ringbuf)) {
+    if (len + sizeof(uint16_t) > ringbuf_capacity(&self->ringbuf)) {
         // This shouldn't happen.
         return;
     }
     // Push all the data onto the ring buffer.
     // Make room for the new value by dropping the oldest packets first.
-    while (ringbuf_size(&self->ringbuf) - ringbuf_num_filled(&self->ringbuf) < len + sizeof(uint16_t)) {
+    while (ringbuf_capacity(&self->ringbuf) - ringbuf_num_filled(&self->ringbuf) < len + sizeof(uint16_t)) {
         uint16_t packet_length;
         ringbuf_get_n(&self->ringbuf, (uint8_t *)&packet_length, sizeof(uint16_t));
         for (uint16_t i = 0; i < packet_length; i++) {
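The only difference in this hunk is the accessor name for the ring buffer's total size (ringbuf_capacity() on 7.2.x, ringbuf_size() on main); the drop-oldest logic is the same on both branches. A small sketch of the invariant the loop maintains, with the ringbuf_t internals assumed from context:

    // Free space is total size minus bytes currently stored. write_to_ringbuf()
    // keeps discarding whole packets (a uint16_t length prefix plus that many
    // data bytes) until the incoming packet fits.
    size_t total = ringbuf_size(&self->ringbuf);   // ringbuf_capacity() on 7.2.x
    size_t used = ringbuf_num_filled(&self->ringbuf);
    bool fits = len + sizeof(uint16_t) <= total - used;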
@@ -101,8 +101,8 @@ void common_hal_bleio_packet_buffer_construct(
     }

     if (incoming) {
-        if (!ringbuf_alloc(&self->ringbuf, buffer_size * (sizeof(uint16_t) + max_packet_size))) {
-            mp_raise_ValueError(MP_ERROR_TEXT("Buffer too large and unable to allocate"));
+        if (!ringbuf_alloc(&self->ringbuf, buffer_size * (sizeof(uint16_t) + max_packet_size), false)) {
+            mp_raise_ValueError(translate("Buffer too large and unable to allocate"));
         }
     }

@@ -110,8 +110,8 @@ void common_hal_bleio_packet_buffer_construct(
         self->packet_queued = false;
         self->pending_index = 0;
         self->pending_size = 0;
-        self->outgoing[0] = m_malloc(max_packet_size);
-        self->outgoing[1] = m_malloc(max_packet_size);
+        self->outgoing[0] = m_malloc(max_packet_size, false);
+        self->outgoing[1] = m_malloc(max_packet_size, false);
     } else {
         self->outgoing[0] = NULL;
         self->outgoing[1] = NULL;
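The same signature difference shows up for plain heap buffers: main's m_malloc() takes only a size, while 7.2.x passed an extra boolean that appears to be the same long-lived flag seen earlier for ringbuf_alloc(). A hedged sketch using the outgoing packet buffers from this hunk (illustration only):

    // 7.2.x: the second argument is presumably the long-lived flag (false here,
    // so the buffer stays in the normal, movable part of the heap).
    self->outgoing[0] = m_malloc(max_packet_size, false);

    // main: the single-argument form; the allocation itself is unchanged.
    self->outgoing[0] = m_malloc(max_packet_size);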
@@ -151,7 +151,7 @@ mp_int_t common_hal_bleio_packet_buffer_readinto(bleio_packet_buffer_obj_t *self
 mp_int_t common_hal_bleio_packet_buffer_write(bleio_packet_buffer_obj_t *self,
     const uint8_t *data, size_t len, uint8_t *header, size_t header_len) {
     if (self->outgoing[0] == NULL) {
-        mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Writes not supported on Characteristic"));
+        mp_raise_bleio_BluetoothError(translate("Writes not supported on Characteristic"));
     }
     if (self->conn_handle == BLE_CONN_HANDLE_INVALID) {
         return -1;
@@ -160,7 +160,7 @@ mp_int_t common_hal_bleio_packet_buffer_write(bleio_packet_buffer_obj_t *self,

     if (len + header_len > outgoing_packet_length) {
         // Supplied data will not fit in a single BLE packet.
-        mp_raise_ValueError(MP_ERROR_TEXT("Total data to write is larger than outgoing_packet_length"));
+        mp_raise_ValueError(translate("Total data to write is larger than outgoing_packet_length"));
     }

     if (len + self->pending_size > outgoing_packet_length) {
@@ -264,6 +264,5 @@ bool common_hal_bleio_packet_buffer_deinited(bleio_packet_buffer_obj_t *self) {
 void common_hal_bleio_packet_buffer_deinit(bleio_packet_buffer_obj_t *self) {
     if (!common_hal_bleio_packet_buffer_deinited(self)) {
         bleio_characteristic_clear_observer(self->characteristic);
-        ringbuf_deinit(&self->ringbuf);
     }
 }
@ -53,7 +53,7 @@ uint32_t _common_hal_bleio_service_construct(bleio_service_obj_t *self, bleio_uu
|
|||||||
void common_hal_bleio_service_construct(bleio_service_obj_t *self, bleio_uuid_obj_t *uuid, bool is_secondary) {
|
void common_hal_bleio_service_construct(bleio_service_obj_t *self, bleio_uuid_obj_t *uuid, bool is_secondary) {
|
||||||
if (_common_hal_bleio_service_construct(self, uuid, is_secondary,
|
if (_common_hal_bleio_service_construct(self, uuid, is_secondary,
|
||||||
mp_obj_new_list(0, NULL)) != 0) {
|
mp_obj_new_list(0, NULL)) != 0) {
|
||||||
mp_raise_RuntimeError(MP_ERROR_TEXT("Failed to add service"));
|
mp_raise_RuntimeError(translate("Failed to add service"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -89,7 +89,7 @@ void common_hal_bleio_service_add_characteristic(bleio_service_obj_t *self,
|
|||||||
|
|
||||||
if (self->handle != common_hal_bleio_adapter_obj.last_added_service_handle) {
|
if (self->handle != common_hal_bleio_adapter_obj.last_added_service_handle) {
|
||||||
mp_raise_bleio_BluetoothError(
|
mp_raise_bleio_BluetoothError(
|
||||||
MP_ERROR_TEXT("Characteristic can only be added to most recently added service"));
|
translate("Characteristic can only be added to most recently added service"));
|
||||||
}
|
}
|
||||||
characteristic->decl_handle = bleio_adapter_add_attribute(
|
characteristic->decl_handle = bleio_adapter_add_attribute(
|
||||||
&common_hal_bleio_adapter_obj, MP_OBJ_TO_PTR(characteristic));
|
&common_hal_bleio_adapter_obj, MP_OBJ_TO_PTR(characteristic));
|
||||||
@ -101,7 +101,8 @@ void common_hal_bleio_service_add_characteristic(bleio_service_obj_t *self,
|
|||||||
|
|
||||||
if (characteristic->props & (CHAR_PROP_NOTIFY | CHAR_PROP_INDICATE)) {
|
if (characteristic->props & (CHAR_PROP_NOTIFY | CHAR_PROP_INDICATE)) {
|
||||||
// We need a CCCD if this characteristic is doing notify or indicate.
|
// We need a CCCD if this characteristic is doing notify or indicate.
|
||||||
bleio_descriptor_obj_t *cccd = mp_obj_malloc(bleio_descriptor_obj_t, &bleio_descriptor_type);
|
bleio_descriptor_obj_t *cccd = m_new_obj(bleio_descriptor_obj_t);
|
||||||
|
cccd->base.type = &bleio_descriptor_type;
|
||||||
|
|
||||||
uint16_t zero = 0;
|
uint16_t zero = 0;
|
||||||
mp_buffer_info_t zero_cccd_value = {
|
mp_buffer_info_t zero_cccd_value = {
|
||||||
|
@ -50,18 +50,13 @@ bool vm_used_ble;
|
|||||||
|
|
||||||
// switch (sec_status) {
|
// switch (sec_status) {
|
||||||
// case BLE_GAP_SEC_STATUS_UNSPECIFIED:
|
// case BLE_GAP_SEC_STATUS_UNSPECIFIED:
|
||||||
// mp_raise_bleio_SecurityError(MP_ERROR_TEXT("Unspecified issue. Can be that the pairing prompt on the other device was declined or ignored."));
|
// mp_raise_bleio_SecurityError(translate("Unspecified issue. Can be that the pairing prompt on the other device was declined or ignored."));
|
||||||
// return;
|
// return;
|
||||||
// default:
|
// default:
|
||||||
// mp_raise_bleio_SecurityError(MP_ERROR_TEXT("Unknown security error: 0x%04x"), sec_status);
|
// mp_raise_bleio_SecurityError(translate("Unknown security error: 0x%04x"), sec_status);
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
void bleio_user_reset() {
|
|
||||||
// HCI doesn't support the BLE workflow so just do a full reset.
|
|
||||||
bleio_reset();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Turn off BLE on a reset or reload.
|
// Turn off BLE on a reset or reload.
|
||||||
void bleio_reset() {
|
void bleio_reset() {
|
||||||
// Create a UUID object for all CCCD's.
|
// Create a UUID object for all CCCD's.
|
||||||
@ -96,14 +91,14 @@ bleio_adapter_obj_t common_hal_bleio_adapter_obj = {
|
|||||||
|
|
||||||
bleio_adapter_obj_t *common_hal_bleio_allocate_adapter_or_raise(void) {
|
bleio_adapter_obj_t *common_hal_bleio_allocate_adapter_or_raise(void) {
|
||||||
if (common_hal_bleio_adapter_obj.allocated) {
|
if (common_hal_bleio_adapter_obj.allocated) {
|
||||||
mp_raise_RuntimeError(MP_ERROR_TEXT("Too many Adapters"));
|
mp_raise_RuntimeError(translate("Too many Adapters"));
|
||||||
}
|
}
|
||||||
return &common_hal_bleio_adapter_obj;
|
return &common_hal_bleio_adapter_obj;
|
||||||
}
|
}
|
||||||
|
|
||||||
void common_hal_bleio_check_connected(uint16_t conn_handle) {
|
void common_hal_bleio_check_connected(uint16_t conn_handle) {
|
||||||
if (conn_handle == BLE_CONN_HANDLE_INVALID) {
|
if (conn_handle == BLE_CONN_HANDLE_INVALID) {
|
||||||
mp_raise_ConnectionError(MP_ERROR_TEXT("Not connected"));
|
mp_raise_ConnectionError(translate("Not connected"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -95,7 +95,7 @@ STATIC uint8_t bleio_properties_to_ble_spec_properties(uint8_t bleio_properties)
|
|||||||
return ble_spec_properties;
|
return ble_spec_properties;
|
||||||
}
|
}
|
||||||
|
|
||||||
// FIX not currently used; re-enable when used.
|
// FIX not currently used; reenable when used.
|
||||||
#if 0
|
#if 0
|
||||||
STATIC uint8_t ble_spec_properties_to_bleio_properties(uint8_t ble_spec_properties) {
|
STATIC uint8_t ble_spec_properties_to_bleio_properties(uint8_t ble_spec_properties) {
|
||||||
uint8_t bleio_properties = 0;
|
uint8_t bleio_properties = 0;
|
||||||
@ -963,7 +963,7 @@ static void process_read_group_req(uint16_t conn_handle, uint16_t mtu, uint8_t d
|
|||||||
// Keep track of the first one to make sure.
|
// Keep track of the first one to make sure.
|
||||||
size_t sizeof_first_service_uuid = 0;
|
size_t sizeof_first_service_uuid = 0;
|
||||||
|
|
||||||
// Size of a single bt_att_group_data chunk. Start with the initial size, and
|
// Size of a single bt_att_group_data chunk. Start with the intial size, and
|
||||||
// add the uuid size in the loop below.
|
// add the uuid size in the loop below.
|
||||||
size_t data_length = sizeof(struct bt_att_group_data);
|
size_t data_length = sizeof(struct bt_att_group_data);
|
||||||
|
|
||||||
@ -1722,57 +1722,57 @@ void att_process_data(uint16_t conn_handle, uint8_t dlen, uint8_t data[]) {
|
|||||||
|
|
||||||
// FIX Do we need all of these?
|
// FIX Do we need all of these?
|
||||||
static void check_att_err(uint8_t err) {
|
static void check_att_err(uint8_t err) {
|
||||||
mp_rom_error_text_t msg = NULL;
|
const compressed_string_t *msg = NULL;
|
||||||
switch (err) {
|
switch (err) {
|
||||||
case 0:
|
case 0:
|
||||||
return;
|
return;
|
||||||
case BT_ATT_ERR_INVALID_HANDLE:
|
case BT_ATT_ERR_INVALID_HANDLE:
|
||||||
msg = MP_ERROR_TEXT("Invalid handle");
|
msg = translate("Invalid handle");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_READ_NOT_PERMITTED:
|
case BT_ATT_ERR_READ_NOT_PERMITTED:
|
||||||
msg = MP_ERROR_TEXT("Read not permitted");
|
msg = translate("Read not permitted");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_WRITE_NOT_PERMITTED:
|
case BT_ATT_ERR_WRITE_NOT_PERMITTED:
|
||||||
msg = MP_ERROR_TEXT("Write not permitted");
|
msg = translate("Write not permitted");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_INVALID_PDU:
|
case BT_ATT_ERR_INVALID_PDU:
|
||||||
msg = MP_ERROR_TEXT("Invalid PDU");
|
msg = translate("Invalid PDU");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_NOT_SUPPORTED:
|
case BT_ATT_ERR_NOT_SUPPORTED:
|
||||||
msg = MP_ERROR_TEXT("Not supported");
|
msg = translate("Not supported");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_INVALID_OFFSET:
|
case BT_ATT_ERR_INVALID_OFFSET:
|
||||||
msg = MP_ERROR_TEXT("Invalid offset");
|
msg = translate("Invalid offset");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_PREPARE_QUEUE_FULL:
|
case BT_ATT_ERR_PREPARE_QUEUE_FULL:
|
||||||
msg = MP_ERROR_TEXT("Prepare queue full");
|
msg = translate("Prepare queue full");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_ATTRIBUTE_NOT_FOUND:
|
case BT_ATT_ERR_ATTRIBUTE_NOT_FOUND:
|
||||||
msg = MP_ERROR_TEXT("Attribute not found");
|
msg = translate("Attribute not found");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_ATTRIBUTE_NOT_LONG:
|
case BT_ATT_ERR_ATTRIBUTE_NOT_LONG:
|
||||||
msg = MP_ERROR_TEXT("Attribute not long");
|
msg = translate("Attribute not long");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_ENCRYPTION_KEY_SIZE:
|
case BT_ATT_ERR_ENCRYPTION_KEY_SIZE:
|
||||||
msg = MP_ERROR_TEXT("Encryption key size");
|
msg = translate("Encryption key size");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_INVALID_ATTRIBUTE_LEN:
|
case BT_ATT_ERR_INVALID_ATTRIBUTE_LEN:
|
||||||
msg = MP_ERROR_TEXT("Invalid attribute length");
|
msg = translate("Invalid attribute length");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_UNLIKELY:
|
case BT_ATT_ERR_UNLIKELY:
|
||||||
msg = MP_ERROR_TEXT("Unlikely");
|
msg = translate("Unlikely");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_UNSUPPORTED_GROUP_TYPE:
|
case BT_ATT_ERR_UNSUPPORTED_GROUP_TYPE:
|
||||||
msg = MP_ERROR_TEXT("Unsupported group type");
|
msg = translate("Unsupported group type");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_INSUFFICIENT_RESOURCES:
|
case BT_ATT_ERR_INSUFFICIENT_RESOURCES:
|
||||||
msg = MP_ERROR_TEXT("Insufficient resources");
|
msg = translate("Insufficient resources");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_DB_OUT_OF_SYNC:
|
case BT_ATT_ERR_DB_OUT_OF_SYNC:
|
||||||
msg = MP_ERROR_TEXT("DB out of sync");
|
msg = translate("DB out of sync");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_VALUE_NOT_ALLOWED:
|
case BT_ATT_ERR_VALUE_NOT_ALLOWED:
|
||||||
msg = MP_ERROR_TEXT("Value not allowed");
|
msg = translate("Value not allowed");
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (msg) {
|
if (msg) {
|
||||||
@ -1781,15 +1781,15 @@ static void check_att_err(uint8_t err) {
|
|||||||
|
|
||||||
switch (err) {
|
switch (err) {
|
||||||
case BT_ATT_ERR_AUTHENTICATION:
|
case BT_ATT_ERR_AUTHENTICATION:
|
||||||
msg = MP_ERROR_TEXT("Insufficient authentication");
|
msg = translate("Insufficient authentication");
|
||||||
break;
|
break;
|
||||||
case BT_ATT_ERR_INSUFFICIENT_ENCRYPTION:
|
case BT_ATT_ERR_INSUFFICIENT_ENCRYPTION:
|
||||||
msg = MP_ERROR_TEXT("Insufficient encryption");
|
msg = translate("Insufficient encryption");
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
if (msg) {
|
if (msg) {
|
||||||
mp_raise_bleio_SecurityError(msg);
|
mp_raise_bleio_SecurityError(msg);
|
||||||
}
|
}
|
||||||
|
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Unknown ATT error: 0x%02x"), err);
|
mp_raise_bleio_BluetoothError(translate("Unknown ATT error: 0x%02x"), err);
|
||||||
}
|
}
|
||||||
|
@ -770,35 +770,35 @@ void hci_check_error(hci_result_t result) {
|
|||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_RESPONSE_TIMEOUT:
|
case HCI_RESPONSE_TIMEOUT:
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Timeout waiting for HCI response"));
|
mp_raise_bleio_BluetoothError(translate("Timeout waiting for HCI response"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_WRITE_TIMEOUT:
|
case HCI_WRITE_TIMEOUT:
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Timeout waiting to write HCI request"));
|
mp_raise_bleio_BluetoothError(translate("Timeout waiting to write HCI request"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_READ_ERROR:
|
case HCI_READ_ERROR:
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Error reading from HCI adapter"));
|
mp_raise_bleio_BluetoothError(translate("Error reading from HCI adapter"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_WRITE_ERROR:
|
case HCI_WRITE_ERROR:
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Error writing to HCI adapter"));
|
mp_raise_bleio_BluetoothError(translate("Error writing to HCI adapter"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_PACKET_SIZE_ERROR:
|
case HCI_PACKET_SIZE_ERROR:
|
||||||
mp_raise_RuntimeError(MP_ERROR_TEXT("HCI packet size mismatch"));
|
mp_raise_RuntimeError(translate("HCI packet size mismatch"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
case HCI_ATT_ERROR:
|
case HCI_ATT_ERROR:
|
||||||
mp_raise_RuntimeError(MP_ERROR_TEXT("Error in ATT protocol code"));
|
mp_raise_RuntimeError(translate("Error in ATT protocol code"));
|
||||||
return;
|
return;
|
||||||
|
|
||||||
default:
|
default:
|
||||||
// Should be an HCI status error, > 0.
|
// Should be an HCI status error, > 0.
|
||||||
if (result > 0) {
|
if (result > 0) {
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("HCI status error: %02x"), result);
|
mp_raise_bleio_BluetoothError(translate("HCI status error: %02x"), result);
|
||||||
} else {
|
} else {
|
||||||
mp_raise_bleio_BluetoothError(MP_ERROR_TEXT("Unknown hci_result_t: %d"), result);
|
mp_raise_bleio_BluetoothError(translate("Unknown hci_result_t: %d"), result);
|
||||||
}
|
}
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
@ -150,7 +150,7 @@ struct bt_att_read_mult_req {
|
|||||||
uint16_t handles[];
|
uint16_t handles[];
|
||||||
} __packed;
|
} __packed;
|
||||||
|
|
||||||
/* Read Multiple Response */
|
/* Read Multiple Respose */
|
||||||
#define BT_ATT_OP_READ_MULT_RSP 0x0f
|
#define BT_ATT_OP_READ_MULT_RSP 0x0f
|
||||||
struct bt_att_read_mult_rsp {
|
struct bt_att_read_mult_rsp {
|
||||||
uint8_t _dummy[0];
|
uint8_t _dummy[0];
|
||||||
@ -243,7 +243,7 @@ struct bt_att_read_mult_vl_req {
|
|||||||
uint16_t handles[];
|
uint16_t handles[];
|
||||||
} __packed;
|
} __packed;
|
||||||
|
|
||||||
/* Read Multiple Response */
|
/* Read Multiple Respose */
|
||||||
#define BT_ATT_OP_READ_MULT_VL_RSP 0x21
|
#define BT_ATT_OP_READ_MULT_VL_RSP 0x21
|
||||||
struct bt_att_read_mult_vl_rsp {
|
struct bt_att_read_mult_vl_rsp {
|
||||||
uint16_t len;
|
uint16_t len;
|
||||||
|
@@ -141,7 +141,7 @@ statement will ensure hardware isn't enabled longer than needed.
 Verify your device
 --------------------------------------------------------------------------------

-Whenever possible, make sure the device you are talking to is the device you expect.
+Whenever possible, make sure device you are talking to is the device you expect.
 If not, raise a RuntimeError. Beware that I2C addresses can be identical on
 different devices so read registers you know to make sure they match your
 expectation. Validating this upfront will help catch mistakes.
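The guideline quoted above lends itself to a short sketch. This is a minimal, hypothetical example (the register address, expected ID value, and I2C address are made up for illustration): read a chip-ID register once at construction time and raise ``RuntimeError`` on a mismatch.

.. code-block:: python

    from adafruit_bus_device import i2c_device

    _CHIP_ID_REG = 0x0F       # hypothetical "who am I" register
    _CHIP_ID_EXPECTED = 0x33  # hypothetical expected ID value

    class ExampleDriver:
        def __init__(self, i2c, address=0x18):
            self.i2c_device = i2c_device.I2CDevice(i2c, address)
            chip_id = bytearray(1)
            with self.i2c_device as device:
                # Write the register address, then read back one byte.
                device.write_then_readinto(bytes([_CHIP_ID_REG]), chip_id)
            if chip_id[0] != _CHIP_ID_EXPECTED:
                raise RuntimeError("Device at this I2C address is not the expected chip")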
@@ -202,10 +202,10 @@ interchangeably with the CPython name. This is confusing. Instead, think up a
 new name that is related to the extra functionality you are adding.

 For example, storage mounting and unmounting related functions were moved from
-``uos`` into a new `storage` module. These names better match their
-functionality and do not conflict with CPython names. Make sure to check that
-you don't conflict with CPython libraries too. That way we can port the API to
-CPython in the future.
+``uos`` into a new `storage` module. Terminal related functions were moved into
+`multiterminal`. These names better match their functionality and do not
+conflict with CPython names. Make sure to check that you don't conflict with
+CPython libraries too. That way we can port the API to CPython in the future.

 Example
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -213,7 +213,7 @@ Example
 When adding extra functionality to CircuitPython to mimic what a normal
 operating system would do, either copy an existing CPython API (for example file
 writing) or create a separate module to achieve what you want. For example,
-mounting and unmounting drives is not a part of CPython so it should be done in a
+mounting and unmount drives is not a part of CPython so it should be done in a
 module, such as a new ``storage`` module, that is only available in CircuitPython.
 That way when someone moves the code to CPython they know what parts need to be
 adapted.
@ -267,14 +267,6 @@ After the license comment::
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
Version description
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
After the import statements::
|
|
||||||
|
|
||||||
__version__ = "0.0.0+auto.0"
|
|
||||||
__repo__ = "<repo github link>"
|
|
||||||
|
|
||||||
|
|
||||||
Class description
|
Class description
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||||
@@ -317,7 +309,7 @@ following structure:

 param_type
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The type of the parameter. This could be, among others, ``int``, ``float``, ``str``, ``bool``, etc.
+The type of the parameter. This could be among other `int`, `float`, `str` `bool`, etc.
 To document an object in the CircuitPython domain, you need to include a ``~`` before the
 definition as shown in the following example:

@ -502,45 +494,6 @@ backticks ``:class:`~adafruit_motor.servo.Servo```. You must also add the refer
|
|||||||
|
|
||||||
"adafruit_motor": ("https://circuitpython.readthedocs.io/projects/motor/en/latest/", None,),
|
"adafruit_motor": ("https://circuitpython.readthedocs.io/projects/motor/en/latest/", None,),
|
||||||
|
|
||||||
Use ``adafruit_register`` when possible
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
`Register <https://github.com/adafruit/Adafruit_CircuitPython_Register>`_ is
|
|
||||||
a foundational library that manages packing and unpacking data from I2C device
|
|
||||||
registers. There is also `Register SPI <https://github.com/adafruit/Adafruit_CircuitPython_Register_SPI>`_
|
|
||||||
for SPI devices. When possible, use one of these libraries for unpacking and
|
|
||||||
packing registers. This ensures the packing code is shared amongst all
|
|
||||||
registers (even across drivers). Furthermore, it simplifies device definitions
|
|
||||||
by making them declarative (only data.)
|
|
||||||
|
|
||||||
Values with non-consecutive bits in a register or that represent FIFO endpoints
|
|
||||||
may not map well to existing register classes. In unique cases like these, it is
|
|
||||||
ok to read and write the register directly.
|
|
||||||
|
|
||||||
*Do not* add all registers from a datasheet upfront. Instead, only add the ones
|
|
||||||
necessary for the functionality the driver exposes. Adding them all will lead to
|
|
||||||
unnecessary file size and API clutter. See `this video about outside-in design
|
|
||||||
from @tannewt <https://www.youtube.com/watch?v=3QewiyfBQh8>`_.
|
|
||||||
|
|
||||||
I2C Example
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
from adafruit_register import i2c_bit
|
|
||||||
from adafruit_bus_device import i2c_device
|
|
||||||
|
|
||||||
class HelloWorldDevice:
|
|
||||||
"""Device with two bits to control when the words 'hello' and 'world' are lit."""
|
|
||||||
|
|
||||||
hello = i2c_bit.RWBit(0x0, 0x0)
|
|
||||||
"""Bit to indicate if hello is lit."""
|
|
||||||
|
|
||||||
world = i2c_bit.RWBit(0x1, 0x0)
|
|
||||||
"""Bit to indicate if world is lit."""
|
|
||||||
|
|
||||||
def __init__(self, i2c, device_address=0x0):
|
|
||||||
self.i2c_device = i2c_device.I2CDevice(i2c, device_address)
|
|
||||||
|
|
||||||
Use BusDevice
|
Use BusDevice
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
@@ -715,24 +668,8 @@ when using ``const()``, keep in mind these general guide lines:

 - Always use via an import, ex: ``from micropython import const``
 - Limit use to global (module level) variables only.
-- Only used when the user will not need access to variable and prefix name with
-  a leading underscore, ex: ``_SOME_CONST``.
+- If user will not need access to variable, prefix name with a leading
+  underscore, ex: ``_SOME_CONST``.

-Example
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. code-block:: python
-
-    from adafruit_bus_device import i2c_device
-    from micropython import const
-
-    _DEFAULT_I2C_ADDR = const(0x42)
-
-    class Widget:
-        """A generic widget."""
-
-        def __init__(self, i2c, address=_DEFAULT_I2C_ADDR):
-            self.i2c_device = i2c_device.I2CDevice(i2c, address)
Libraries Examples
|
Libraries Examples
|
||||||
------------------
|
------------------
|
||||||
@@ -742,7 +679,14 @@ You could other examples if needed featuring different
 functionalities of the library.
 If you add additional examples, be sure to include them in the ``examples.rst``. Naming of the examples
 files should use the name of the library followed by a description, using underscore to separate them.
+
+When using print statements you should use the ``" ".format()`` format, as there are particular boards
+that are not capable to use f-strings.
+
+.. code-block:: python
+
+    text_to_display = "World!"
+
+    print("Hello {}".format(text_to_display))

Sensor properties and units
|
Sensor properties and units
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
@ -807,16 +751,6 @@ properties.
|
|||||||
| ``sound_level`` | float | non-unit-specific sound level (monotonic but not actual decibels) |
|
| ``sound_level`` | float | non-unit-specific sound level (monotonic but not actual decibels) |
|
||||||
+-----------------------+-----------------------+-------------------------------------------------------------------------+
|
+-----------------------+-----------------------+-------------------------------------------------------------------------+
|
||||||
|
|
||||||
Driver constant naming
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
When adding variables for constant values for a driver. Do not include the
|
|
||||||
device's name in the variable name. For example, in ``adafruit_fancy123.py``,
|
|
||||||
variables should not start with ``FANCY123_``. Adding this prefix increases RAM
|
|
||||||
usage and .mpy file size because variable names are preserved. User code should
|
|
||||||
refer to these constants as ``adafruit_fancy123.HELLO_WORLD`` for clarity.
|
|
||||||
``adafruit_fancy123.FANCY123_HELLO_WORLD`` would be overly verbose.
|
|
||||||
|
|
||||||
Adding native modules
|
Adding native modules
|
||||||
--------------------------------------------------------------------------------
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
33 docs/drivers.rst Normal file
@ -0,0 +1,33 @@
|
|||||||
|
Additional CircuitPython Libraries and Drivers on GitHub
|
||||||
|
=========================================================
|
||||||
|
|
||||||
|
These are libraries and drivers available in separate GitHub repos. They are
|
||||||
|
designed for use with CircuitPython and may or may not work with
|
||||||
|
`MicroPython <https://micropython.org>`_.
|
||||||
|
|
||||||
|
|
||||||
|
Adafruit CircuitPython Library Bundle
|
||||||
|
--------------------------------------
|
||||||
|
|
||||||
|
We provide a bundle of all our libraries to ease installation of drivers and
|
||||||
|
their dependencies. The bundle is primarily geared to the Adafruit Express line
|
||||||
|
of boards which feature a relatively large external flash. With Express boards,
|
||||||
|
it's easy to copy them all onto the filesystem. However, if you don't have
|
||||||
|
enough space simply copy things over as they are needed.
|
||||||
|
|
||||||
|
- The Adafruit bundles are available on GitHub: <https://github.com/adafruit/Adafruit_CircuitPython_Bundle/releases>.
|
||||||
|
|
||||||
|
- Documentation for the bundle, which includes links to documentation for all
|
||||||
|
libraries, is available here: <https://circuitpython.readthedocs.io/projects/bundle/en/latest/>.
|
||||||
|
|
||||||
|
|
||||||
|
CircuitPython Community Library Bundle
|
||||||
|
---------------------------------------
|
||||||
|
|
||||||
|
This bundle contains non-Adafruit sponsored libraries, that are written and submitted
|
||||||
|
by members of the community.
|
||||||
|
|
||||||
|
- The Community bundles are available on GitHub: <https://github.com/adafruit/CircuitPython_Community_Bundle/releases>.
|
||||||
|
|
||||||
|
- Documentation is not available on ReadTheDocs at this time. See each library for any
|
||||||
|
included documentation.
|
@ -1,96 +0,0 @@
|
|||||||
Environment Variables
|
|
||||||
=====================
|
|
||||||
|
|
||||||
CircuitPython 8.0.0 introduces support for environment variables. Environment
|
|
||||||
variables are commonly used to store "secrets" such as Wi-Fi passwords and API
|
|
||||||
keys. This method *does not* make them secure. It only separates them from the
|
|
||||||
code.
|
|
||||||
|
|
||||||
CircuitPython uses a file called ``settings.toml`` at the drive root (no
|
|
||||||
folder) as the environment. User code can access the values from the file
|
|
||||||
using `os.getenv()`. It is recommended to save any values used repeatedly in a
|
|
||||||
variable because `os.getenv()` will parse the ``settings.toml`` file contents
|
|
||||||
on every access.
|
|
||||||
|
|
||||||
CircuitPython only supports a subset of the full toml specification, see below
|
|
||||||
for more details. The subset is very "Python-like", which is a key reason we
|
|
||||||
selected the format.
|
|
||||||
|
|
||||||
Due to technical limitations it probably also accepts some files that are
|
|
||||||
not valid TOML files; bugs of this nature are subject to change (i.e., be
|
|
||||||
fixed) without the usual deprecation period for incompatible changes.
|
|
||||||
|
|
||||||
File format example:
|
|
||||||
|
|
||||||
.. code-block::
|
|
||||||
|
|
||||||
str_key="Hello world" # with trailing comment
|
|
||||||
int_key = 7
|
|
||||||
unicode_key="œuvre"
|
|
||||||
unicode_key2="\\u0153uvre" # same as above
|
|
||||||
unicode_key3="\\U00000153uvre" # same as above
|
|
||||||
escape_codes="supported, including \\r\\n\\"\\\\"
|
|
||||||
# comment
|
|
||||||
[subtable]
|
|
||||||
subvalue="cannot retrieve this using getenv"
|
|
||||||
|
|
||||||
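As a hedged sketch of how user code would read the values from the file format example above with ``os.getenv()``, caching each value in a local name so the file is only parsed once:

.. code-block:: python

    import os

    greeting = os.getenv("str_key")   # "Hello world"
    count = os.getenv("int_key")      # 7; integer values come back as int

    print("{} x{}".format(greeting, count))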
|
|
||||||
Details of the toml language subset
|
|
||||||
-----------------------------------
|
|
||||||
|
|
||||||
* The content is required to be in UTF-8 encoding
|
|
||||||
* The supported data types are string and integer
|
|
||||||
* Only basic strings are supported, not triple-quoted strings
|
|
||||||
* Only integers supported by strtol. (no 0o, no 0b, no underscores 1_000, 011
|
|
||||||
is 9, not 11)
|
|
||||||
* Only bare keys are supported
|
|
||||||
* Duplicate keys are not diagnosed.
|
|
||||||
* Comments are supported
|
|
||||||
* Only values from the "root table" can be retrieved
|
|
||||||
* due to technical limitations, the content of multi-line
|
|
||||||
strings can erroneously be parsed as a value.
|
|
||||||
|
|
||||||
CircuitPython behavior
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
CircuitPython will also read the environment to configure its behavior. Other
|
|
||||||
keys are ignored by CircuitPython. Here are the keys it uses:
|
|
||||||
|
|
||||||
CIRCUITPY_BLE_NAME
|
|
||||||
~~~~~~~~~~~~~~~~~~
|
|
||||||
Default BLE name the board advertises as, including for the BLE workflow.
|
|
||||||
|
|
||||||
CIRCUITPY_HEAP_START_SIZE
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
Sets the initial size of the python heap, allocated from the outer heap. Must be a multiple of 4.
|
|
||||||
The default is currently 8192.
|
|
||||||
The python heap will grow by doubling and redoubling this initial size until it cannot fit in the outer heap.
|
|
||||||
Larger values will reserve more RAM for python use and prevent the supervisor and SDK
|
|
||||||
from large allocations of their own.
|
|
||||||
Smaller values will likely grow sooner than large start sizes.
|
|
||||||
|
|
||||||
CIRCUITPY_PYSTACK_SIZE
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
Sets the size of the python stack. Must be a multiple of 4. The default value is currently 1536.
|
|
||||||
Increasing the stack reduces the size of the heap available to python code.
|
|
||||||
Used to avoid "Pystack exhausted" errors when the code can't be reworked to avoid it.
|
|
||||||
|
|
||||||
CIRCUITPY_WEB_API_PASSWORD
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
Password required to make modifications to the board from the Web Workflow.
|
|
||||||
|
|
||||||
CIRCUITPY_WEB_API_PORT
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
TCP port number used for the web HTTP API. Defaults to 80 when omitted.
|
|
||||||
|
|
||||||
CIRCUITPY_WEB_INSTANCE_NAME
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
Name the board advertises as for the WEB workflow. Defaults to human readable board name if omitted.
|
|
||||||
|
|
||||||
CIRCUITPY_WIFI_PASSWORD
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
Wi-Fi password used to auto connect to CIRCUITPY_WIFI_SSID.
|
|
||||||
|
|
||||||
CIRCUITPY_WIFI_SSID
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
Wi-Fi SSID to auto-connect to even if user code is not running.
|
|
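Pulling several of the keys described above together, a ``settings.toml`` that enables Wi-Fi and the web workflow might look like the following hypothetical sketch (all values are placeholders):

.. code-block::

    CIRCUITPY_WIFI_SSID="my-network"
    CIRCUITPY_WIFI_PASSWORD="wifi-secret"
    CIRCUITPY_WEB_API_PASSWORD="web-secret"
    CIRCUITPY_WEB_API_PORT=80
    CIRCUITPY_WEB_INSTANCE_NAME="my-board"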
@@ -19,12 +19,9 @@ Full Table of Contents
    :caption: API and Usage

    ../shared-bindings/index.rst
-   library/index.rst
    supported_ports.rst
    troubleshooting.rst
-   libraries.rst
-   workflows
-   environment.rst
+   drivers.rst

 .. toctree::
    :maxdepth: 1
@@ -33,11 +30,13 @@ Full Table of Contents
    design_guide
    porting
    common_hal
-   reference/glossary.rst

 .. toctree::
    :maxdepth: 2
-   :caption: Python stand
+   :caption: MicroPython specific

+   library/index.rst
+   reference/glossary.rst

 .. toctree::
    :maxdepth: 1
@@ -47,7 +46,7 @@ Full Table of Contents
    ../CONTRIBUTING
    ../BUILDING
    ../CODE_OF_CONDUCT
-   ../docs/LICENSE
+   ../license.rst
    ../WEBUSB_README

 Indices and tables
|
@ -1,31 +0,0 @@
|
|||||||
Adafruit CircuitPython Libraries
|
|
||||||
================================
|
|
||||||
|
|
||||||
Documentation for all Adafruit-sponsored CircuitPython libraries is at:
|
|
||||||
<https://docs.circuitpython.org/projects/bundle/en/latest/drivers.html>.
|
|
||||||
|
|
||||||
|
|
||||||
CircuitPython Library Bundles
|
|
||||||
=============================
|
|
||||||
|
|
||||||
Many Python libraries, including device drivers, have been written for use with CircuitPython.
|
|
||||||
They are maintained in separate GitHub repos, one per library.
|
|
||||||
|
|
||||||
Libraries are packaged in *bundles*, which are ZIP files that are snapshots in time of a group of libraries.
|
|
||||||
|
|
||||||
Adafruit sponsors and maintains several hundred libraries, packaged in the **Adafruit Library Bundle**.
|
|
||||||
Adafruit-sponsored libraries are also available on <https://pypi.org>.
|
|
||||||
|
|
||||||
Yet other libraries are maintained by members of the CircuitPython community,
|
|
||||||
and are packaged in the **CircuitPython Community Library Bundle**.
|
|
||||||
|
|
||||||
The Adafruit bundles are available on GitHub: <https://github.com/adafruit/Adafruit_CircuitPython_Bundle/releases>.
|
|
||||||
The Community bundles are available at: <https://github.com/adafruit/CircuitPython_Community_Bundle/releases>.
|
|
||||||
|
|
||||||
More detailed information about the bundles, and download links for the latest bundles
|
|
||||||
are at <https://circuitpython.org/libraries>.
|
|
||||||
|
|
||||||
Documentation about bundle construction is at: <https://circuitpython.readthedocs.io/projects/bundle/en/latest/>.
|
|
||||||
|
|
||||||
Documentation for Community Libraries is not available on ReadTheDocs at this time. See the GitHub repository
|
|
||||||
for each library for any included documentation.
|
|
@@ -1,10 +1,10 @@
 :mod:`array` -- arrays of numeric data
-======================================
+=======================================

 .. module:: array
    :synopsis: efficient arrays of numeric data

-|see_cpython_module| :mod:`python:array`.
+|see_cpython_module| :mod:`cpython:array`.

 Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``,
 ``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the
@ -27,55 +27,3 @@ Classes
|
|||||||
|
|
||||||
Append new elements as contained in `iterable` to the end of
|
Append new elements as contained in `iterable` to the end of
|
||||||
array, growing it.
|
array, growing it.
|
||||||
|
|
||||||
.. method:: __getitem__(index)
|
|
||||||
|
|
||||||
Indexed read of the array, called as ``a[index]`` (where ``a`` is an ``array``).
|
|
||||||
Returns a value if *index* is an ``int`` and an ``array`` if *index* is a slice.
|
|
||||||
Negative indices count from the end and ``IndexError`` is thrown if the index is
|
|
||||||
out of range.
|
|
||||||
|
|
||||||
**Note:** ``__getitem__`` cannot be called directly (``a.__getitem__(index)`` fails) and
|
|
||||||
is not present in ``__dict__``, however ``a[index]`` does work.
|
|
||||||
|
|
||||||
.. method:: __setitem__(index, value)
|
|
||||||
|
|
||||||
Indexed write into the array, called as ``a[index] = value`` (where ``a`` is an ``array``).
|
|
||||||
``value`` is a single value if *index* is an ``int`` and an ``array`` if *index* is a slice.
|
|
||||||
Negative indices count from the end and ``IndexError`` is thrown if the index is out of range.
|
|
||||||
|
|
||||||
**Note:** ``__setitem__`` cannot be called directly (``a.__setitem__(index, value)`` fails) and
|
|
||||||
is not present in ``__dict__``, however ``a[index] = value`` does work.
|
|
||||||
|
|
||||||
.. method:: __len__()
|
|
||||||
|
|
||||||
Returns the number of items in the array, called as ``len(a)`` (where ``a`` is an ``array``).
|
|
||||||
|
|
||||||
**Note:** ``__len__`` cannot be called directly (``a.__len__()`` fails) and the
|
|
||||||
method is not present in ``__dict__``, however ``len(a)`` does work.
|
|
||||||
|
|
||||||
.. method:: __add__(other)
|
|
||||||
|
|
||||||
Return a new ``array`` that is the concatenation of the array with *other*, called as
|
|
||||||
``a + other`` (where ``a`` and *other* are both ``arrays``).
|
|
||||||
|
|
||||||
**Note:** ``__add__`` cannot be called directly (``a.__add__(other)`` fails) and
|
|
||||||
is not present in ``__dict__``, however ``a + other`` does work.
|
|
||||||
|
|
||||||
.. method:: __iadd__(other)
|
|
||||||
|
|
||||||
Concatenates the array with *other* in-place, called as ``a += other`` (where ``a`` and *other*
|
|
||||||
are both ``arrays``). Equivalent to ``extend(other)``.
|
|
||||||
|
|
||||||
**Note:** ``__iadd__`` cannot be called directly (``a.__iadd__(other)`` fails) and
|
|
||||||
is not present in ``__dict__``, however ``a += other`` does work.
|
|
||||||
|
|
||||||
.. method:: __repr__()
|
|
||||||
|
|
||||||
Returns the string representation of the array, called as ``str(a)`` or ``repr(a)```
|
|
||||||
(where ``a`` is an ``array``). Returns the string ``"array(<type>, [<elements>])"``,
|
|
||||||
where ``<type>`` is the type code letter for the array and ``<elements>`` is a comma
|
|
||||||
separated list of the elements of the array.
|
|
||||||
|
|
||||||
**Note:** ``__repr__`` cannot be called directly (``a.__repr__()`` fails) and
|
|
||||||
is not present in ``__dict__``, however ``str(a)`` and ``repr(a)`` both work.
|
|
||||||
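As a quick illustration of the behaviours documented in the hunk above, here is a small sketch; the output comments assume the indexing, length, and concatenation semantics described there.

.. code-block:: python

    from array import array

    a = array("i", [1, 2, 3])
    a.append(4)                  # array('i', [1, 2, 3, 4])
    print(a[1])                  # indexed read -> 2
    a[1] = 20                    # indexed write
    print(len(a))                # -> 4
    print(a + array("i", [5]))   # concatenation -> array('i', [1, 20, 3, 4, 5])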
|
323 docs/library/asyncio.rst Normal file
@ -0,0 +1,323 @@
|
|||||||
|
:mod:`uasyncio` --- asynchronous I/O scheduler
|
||||||
|
==============================================
|
||||||
|
|
||||||
|
.. module:: uasyncio
|
||||||
|
:synopsis: asynchronous I/O scheduler for writing concurrent code
|
||||||
|
|
||||||
|
|see_cpython_module|
|
||||||
|
`asyncio <https://docs.python.org/3.8/library/asyncio.html>`_
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
import uasyncio
|
||||||
|
|
||||||
|
async def blink(led, period_ms):
|
||||||
|
while True:
|
||||||
|
led.on()
|
||||||
|
await uasyncio.sleep_ms(5)
|
||||||
|
led.off()
|
||||||
|
await uasyncio.sleep_ms(period_ms)
|
||||||
|
|
||||||
|
async def main(led1, led2):
|
||||||
|
uasyncio.create_task(blink(led1, 700))
|
||||||
|
uasyncio.create_task(blink(led2, 400))
|
||||||
|
await uasyncio.sleep_ms(10_000)
|
||||||
|
|
||||||
|
# Running on a pyboard
|
||||||
|
from pyb import LED
|
||||||
|
uasyncio.run(main(LED(1), LED(2)))
|
||||||
|
|
||||||
|
# Running on a generic board
|
||||||
|
from machine import Pin
|
||||||
|
uasyncio.run(main(Pin(1), Pin(2)))
|
||||||
|
|
||||||
|
Core functions
|
||||||
|
--------------
|
||||||
|
|
||||||
|
.. function:: create_task(coro)
|
||||||
|
|
||||||
|
Create a new task from the given coroutine and schedule it to run.
|
||||||
|
|
||||||
|
Returns the corresponding `Task` object.
|
||||||
|
|
||||||
|
.. function:: current_task()
|
||||||
|
|
||||||
|
Return the `Task` object associated with the currently running task.
|
||||||
|
|
||||||
|
.. function:: run(coro)
|
||||||
|
|
||||||
|
Create a new task from the given coroutine and run it until it completes.
|
||||||
|
|
||||||
|
Returns the value returned by *coro*.
|
||||||
|
|
||||||
|
.. function:: sleep(t)
|
||||||
|
|
||||||
|
Sleep for *t* seconds (can be a float).
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. function:: sleep_ms(t)
|
||||||
|
|
||||||
|
Sleep for *t* milliseconds.
|
||||||
|
|
||||||
|
This is a coroutine, and a MicroPython extension.
|
||||||
|
|
||||||
|
Additional functions
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
.. function:: wait_for(awaitable, timeout)
|
||||||
|
|
||||||
|
Wait for the *awaitable* to complete, but cancel it if it takes longer
|
||||||
|
that *timeout* seconds. If *awaitable* is not a task then a task will be
|
||||||
|
created from it.
|
||||||
|
|
||||||
|
If a timeout occurs, it cancels the task and raises ``asyncio.TimeoutError``:
|
||||||
|
this should be trapped by the caller.
|
||||||
|
|
||||||
|
Returns the return value of *awaitable*.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. function:: wait_for_ms(awaitable, timeout)
|
||||||
|
|
||||||
|
Similar to `wait_for` but *timeout* is an integer in milliseconds.
|
||||||
|
|
||||||
|
This is a coroutine, and a MicroPython extension.
|
||||||
|
|
||||||
|
.. function:: gather(*awaitables, return_exceptions=False)
|
||||||
|
|
||||||
|
Run all *awaitables* concurrently. Any *awaitables* that are not tasks are
|
||||||
|
promoted to tasks.
|
||||||
|
|
||||||
|
Returns a list of return values of all *awaitables*.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
class Task
|
||||||
|
----------
|
||||||
|
|
||||||
|
.. class:: Task()
|
||||||
|
|
||||||
|
This object wraps a coroutine into a running task. Tasks can be waited on
|
||||||
|
using ``await task``, which will wait for the task to complete and return
|
||||||
|
the return value of the task.
|
||||||
|
|
||||||
|
Tasks should not be created directly, rather use `create_task` to create them.
|
||||||
|
|
||||||
|
.. method:: Task.cancel()
|
||||||
|
|
||||||
|
Cancel the task by injecting a ``CancelledError`` into it. The task may
|
||||||
|
or may not ignore this exception.
|
||||||
|
|
||||||
|
class Event
|
||||||
|
-----------
|
||||||
|
|
||||||
|
.. class:: Event()
|
||||||
|
|
||||||
|
Create a new event which can be used to synchronise tasks. Events start
|
||||||
|
in the cleared state.
|
||||||
|
|
||||||
|
.. method:: Event.is_set()
|
||||||
|
|
||||||
|
Returns ``True`` if the event is set, ``False`` otherwise.
|
||||||
|
|
||||||
|
.. method:: Event.set()
|
||||||
|
|
||||||
|
Set the event. Any tasks waiting on the event will be scheduled to run.
|
||||||
|
|
||||||
|
.. method:: Event.clear()
|
||||||
|
|
||||||
|
Clear the event.
|
||||||
|
|
||||||
|
.. method:: Event.wait()
|
||||||
|
|
||||||
|
Wait for the event to be set. If the event is already set then it returns
|
||||||
|
immediately.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
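A minimal sketch of the Event API described above; the sleep timings are arbitrary and only serve to order the two tasks.

.. code-block:: python

    import uasyncio

    async def waiter(evt):
        await evt.wait()              # returns once evt.set() has been called
        print("event was set")

    async def main():
        evt = uasyncio.Event()
        uasyncio.create_task(waiter(evt))
        await uasyncio.sleep_ms(100)
        evt.set()
        await uasyncio.sleep_ms(10)   # let the waiter run before main() returns

    uasyncio.run(main())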
|
class Lock
|
||||||
|
----------
|
||||||
|
|
||||||
|
.. class:: Lock()
|
||||||
|
|
||||||
|
Create a new lock which can be used to coordinate tasks. Locks start in
|
||||||
|
the unlocked state.
|
||||||
|
|
||||||
|
In addition to the methods below, locks can be used in an ``async with`` statement.
|
||||||
|
|
||||||
|
.. method:: Lock.locked()
|
||||||
|
|
||||||
|
Returns ``True`` if the lock is locked, otherwise ``False``.
|
||||||
|
|
||||||
|
.. method:: Lock.acquire()
|
||||||
|
|
||||||
|
Wait for the lock to be in the unlocked state and then lock it in an atomic
|
||||||
|
way. Only one task can acquire the lock at any one time.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. method:: Lock.release()
|
||||||
|
|
||||||
|
Release the lock. If any tasks are waiting on the lock then the next one in the
|
||||||
|
queue is scheduled to run and the lock remains locked. Otherwise, no tasks are
|
||||||
|
waiting an the lock becomes unlocked.
|
||||||
|
|
||||||
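And a short sketch of the ``async with`` form mentioned above, assuming two tasks contending for one lock:

.. code-block:: python

    import uasyncio

    lock = uasyncio.Lock()

    async def worker(name):
        async with lock:              # acquire() on entry, release() on exit
            print(name, "holds the lock")
            await uasyncio.sleep_ms(50)

    async def main():
        await uasyncio.gather(worker("a"), worker("b"))

    uasyncio.run(main())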
|
TCP stream connections
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
.. function:: open_connection(host, port)
|
||||||
|
|
||||||
|
Open a TCP connection to the given *host* and *port*. The *host* address will be
|
||||||
|
resolved using `socket.getaddrinfo`, which is currently a blocking call.
|
||||||
|
|
||||||
|
Returns a pair of streams: a reader and a writer stream.
|
||||||
|
Will raise a socket-specific ``OSError`` if the host could not be resolved or if
|
||||||
|
the connection could not be made.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. function:: start_server(callback, host, port, backlog=5)
|
||||||
|
|
||||||
|
Start a TCP server on the given *host* and *port*. The *callback* will be
|
||||||
|
called with incoming, accepted connections, and be passed 2 arguments: reader
|
||||||
|
and writer streams for the connection.
|
||||||
|
|
||||||
|
Returns a `Server` object.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. class:: Stream()
|
||||||
|
|
||||||
|
This represents a TCP stream connection. To minimise code this class implements
|
||||||
|
both a reader and a writer, and both ``StreamReader`` and ``StreamWriter`` alias to
|
||||||
|
this class.
|
||||||
|
|
||||||
|
.. method:: Stream.get_extra_info(v)
|
||||||
|
|
||||||
|
Get extra information about the stream, given by *v*. The valid values for *v* are:
|
||||||
|
``peername``.
|
||||||
|
|
||||||
|
.. method:: Stream.close()
|
||||||
|
|
||||||
|
Close the stream.
|
||||||
|
|
||||||
|
.. method:: Stream.wait_closed()
|
||||||
|
|
||||||
|
Wait for the stream to close.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. method:: Stream.read(n)
|
||||||
|
|
||||||
|
Read up to *n* bytes and return them.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. method:: Stream.readinto(buf)
|
||||||
|
|
||||||
|
Read up to n bytes into *buf* with n being equal to the length of *buf*.
|
||||||
|
|
||||||
|
Return the number of bytes read into *buf*.
|
||||||
|
|
||||||
|
This is a coroutine, and a MicroPython extension.
|
||||||
|
|
||||||
|
.. method:: Stream.readexactly(n)
|
||||||
|
|
||||||
|
Read exactly *n* bytes and return them as a bytes object.
|
||||||
|
|
||||||
|
Raises an ``EOFError`` exception if the stream ends before reading *n* bytes.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. method:: Stream.readline()
|
||||||
|
|
||||||
|
Read a line and return it.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. method:: Stream.write(buf)
|
||||||
|
|
||||||
|
Accumulated *buf* to the output buffer. The data is only flushed when
|
||||||
|
`Stream.drain` is called. It is recommended to call `Stream.drain` immediately
|
||||||
|
after calling this function.
|
||||||
|
|
||||||
|
.. method:: Stream.drain()
|
||||||
|
|
||||||
|
Drain (write) all buffered output data out to the stream.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
|
.. class:: Server()
|
||||||
|
|
||||||
|
This represents the server class returned from `start_server`. It can be used
|
||||||
|
in an ``async with`` statement to close the server upon exit.
|
||||||
|
|
||||||
|
.. method:: Server.close()
|
||||||
|
|
||||||
|
Close the server.
|
||||||
|
|
||||||
|
.. method:: Server.wait_closed()
|
||||||
|
|
||||||
|
Wait for the server to close.
|
||||||
|
|
||||||
|
This is a coroutine.
|
||||||
|
|
||||||
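Putting the stream pieces above together, a hedged sketch of a small TCP client; the host, port, and payload are placeholders.

.. code-block:: python

    import uasyncio

    async def fetch(host, port):
        reader, writer = await uasyncio.open_connection(host, port)
        writer.write(b"hello\r\n")    # buffered until drain() is awaited
        await writer.drain()
        line = await reader.readline()
        writer.close()
        await writer.wait_closed()
        return line

    print(uasyncio.run(fetch("192.168.1.10", 1234)))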
|
Event Loop
|
||||||
|
----------
|
||||||
|
|
||||||
|
.. function:: get_event_loop()
|
||||||
|
|
||||||
|
Return the event loop used to schedule and run tasks. See `Loop`.
|
||||||
|
|
||||||
|
.. function:: new_event_loop()
|
||||||
|
|
||||||
|
Reset the event loop and return it.
|
||||||
|
|
||||||
|
Note: since MicroPython only has a single event loop this function just
|
||||||
|
resets the loop's state, it does not create a new one.
|
||||||
|
|
||||||
|
.. class:: Loop()
|
||||||
|
|
||||||
|
This represents the object which schedules and runs tasks. It cannot be
|
||||||
|
created, use `get_event_loop` instead.
|
||||||
|
|
||||||
|
.. method:: Loop.create_task(coro)
|
||||||
|
|
||||||
|
Create a task from the given *coro* and return the new `Task` object.
|
||||||
|
|
||||||
|
.. method:: Loop.run_forever()
|
||||||
|
|
||||||
|
Run the event loop until `stop()` is called.
|
||||||
|
|
||||||
|
.. method:: Loop.run_until_complete(awaitable)
|
||||||
|
|
||||||
|
Run the given *awaitable* until it completes. If *awaitable* is not a task
|
||||||
|
then it will be promoted to one.
|
||||||
|
|
||||||
|
.. method:: Loop.stop()
|
||||||
|
|
||||||
|
Stop the event loop.
|
||||||
|
|
||||||
|
.. method:: Loop.close()
|
||||||
|
|
||||||
|
Close the event loop.
|
||||||
|
|
||||||
|
.. method:: Loop.set_exception_handler(handler)
|
||||||
|
|
||||||
|
Set the exception handler to call when a Task raises an exception that is not
|
||||||
|
caught. The *handler* should accept two arguments: ``(loop, context)``.
|
||||||
|
|
||||||
|
.. method:: Loop.get_exception_handler()
|
||||||
|
|
||||||
|
Get the current exception handler. Returns the handler, or ``None`` if no
|
||||||
|
custom handler is set.
|
||||||
|
|
||||||
|
.. method:: Loop.default_exception_handler(context)
|
||||||
|
|
||||||
|
The default exception handler that is called.
|
||||||
|
|
||||||
|
.. method:: Loop.call_exception_handler(context)
|
||||||
|
|
||||||
|
Call the current exception handler. The argument *context* is passed through and
|
||||||
|
is a dictionary containing keys: ``'message'``, ``'exception'``, ``'future'``.
|
@@ -1,10 +1,10 @@
 :mod:`binascii` -- binary/ASCII conversions
-===========================================
+============================================

 .. module:: binascii
    :synopsis: binary/ASCII conversions

-|see_cpython_module| :mod:`python:binascii`.
+|see_cpython_module| :mod:`cpython:binascii`.

 This module implements conversions between binary data and various
 encodings of it in ASCII form (in both directions).
@@ -31,14 +31,8 @@ Functions
    Conforms to `RFC 2045 s.6.8 <https://tools.ietf.org/html/rfc2045#section-6.8>`_.
    Returns a bytes object.

-.. function:: b2a_base64(data, *, newline=True)
+.. function:: b2a_base64(data)

    Encode binary data in base64 format, as in `RFC 3548
    <https://tools.ietf.org/html/rfc3548.html>`_. Returns the encoded data
-   followed by a newline character if ``newline`` is true, as a bytes object.
-
-.. function:: crc32(data, value=0, /)
-
-   Compute CRC-32, the 32-bit checksum of the bytes in *data* starting with an
-   initial CRC of *value*. The default initial CRC is 0. The algorithm is
-   consistent with the ZIP file checksum.
+   followed by a newline character, as a bytes object.
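For reference, a tiny usage sketch of ``b2a_base64``; the trailing newline matches the behaviour described above.

.. code-block:: python

    import binascii

    encoded = binascii.b2a_base64(b"data")
    print(encoded)   # b'ZGF0YQ==\n'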
|
161 docs/library/btree.rst Normal file
@ -0,0 +1,161 @@
|
|||||||
|
:mod:`btree` -- simple BTree database
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
|
.. module:: btree
|
||||||
|
:synopsis: simple BTree database
|
||||||
|
|
||||||
|
The ``btree`` module implements a simple key-value database using external
|
||||||
|
storage (disk files, or in general case, a random-access ``stream``). Keys are
|
||||||
|
stored sorted in the database, and besides efficient retrieval by a key
|
||||||
|
value, a database also supports efficient ordered range scans (retrieval
|
||||||
|
of values with the keys in a given range). On the application interface
|
||||||
|
side, BTree database work as close a possible to a way standard `dict`
|
||||||
|
type works, one notable difference is that both keys and values must
|
||||||
|
be `bytes` objects (so, if you want to store objects of other types, you
|
||||||
|
need to serialize them to `bytes` first).
|
||||||
|
|
||||||
|
The module is based on the well-known BerkelyDB library, version 1.xx.
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
import btree
|
||||||
|
|
||||||
|
# First, we need to open a stream which holds a database
|
||||||
|
# This is usually a file, but can be in-memory database
|
||||||
|
# using io.BytesIO, a raw flash partition, etc.
|
||||||
|
# Oftentimes, you want to create a database file if it doesn't
|
||||||
|
# exist and open if it exists. Idiom below takes care of this.
|
||||||
|
# DO NOT open database with "a+b" access mode.
|
||||||
|
try:
|
||||||
|
f = open("mydb", "r+b")
|
||||||
|
except OSError:
|
||||||
|
f = open("mydb", "w+b")
|
||||||
|
|
||||||
|
# Now open a database itself
|
||||||
|
db = btree.open(f)
|
||||||
|
|
||||||
|
# The keys you add will be sorted internally in the database
|
||||||
|
db[b"3"] = b"three"
|
||||||
|
db[b"1"] = b"one"
|
||||||
|
db[b"2"] = b"two"
|
||||||
|
|
||||||
|
# Assume that any changes are cached in memory unless
|
||||||
|
# explicitly flushed (or database closed). Flush database
|
||||||
|
# at the end of each "transaction".
|
||||||
|
db.flush()
|
||||||
|
|
||||||
|
# Prints b'two'
|
||||||
|
print(db[b"2"])
|
||||||
|
|
||||||
|
# Iterate over sorted keys in the database, starting from b"2"
|
||||||
|
# until the end of the database, returning only values.
|
||||||
|
# Mind that arguments passed to values() method are *key* values.
|
||||||
|
# Prints:
|
||||||
|
# b'two'
|
||||||
|
# b'three'
|
||||||
|
for word in db.values(b"2"):
|
||||||
|
print(word)
|
||||||
|
|
||||||
|
del db[b"2"]
|
||||||
|
|
||||||
|
# No longer true, prints False
|
||||||
|
print(b"2" in db)
|
||||||
|
|
||||||
|
# Prints:
|
||||||
|
# b"1"
|
||||||
|
# b"3"
|
||||||
|
for key in db:
|
||||||
|
print(key)
|
||||||
|
|
||||||
|
db.close()
|
||||||
|
|
||||||
|
# Don't forget to close the underlying stream!
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
Functions
|
||||||
|
---------
|
||||||
|
|
||||||
|
.. function:: open(stream, *, flags=0, pagesize=0, cachesize=0, minkeypage=0)
|
||||||
|
|
||||||
|
Open a database from a random-access ``stream`` (like an open file). All
|
||||||
|
other parameters are optional and keyword-only, and allow to tweak advanced
|
||||||
|
parameters of the database operation (most users will not need them):
|
||||||
|
|
||||||
|
* *flags* - Currently unused.
|
||||||
|
* *pagesize* - Page size used for the nodes in the BTree. The acceptable range
is 512-65536. If 0, a port-specific default will be used, optimized for
the port's memory usage and/or performance.
* *cachesize* - Suggested memory cache size in bytes. For a
board with enough memory, using larger values may improve performance.
The cache policy is as follows: the entire cache is not allocated at once;
instead, accessing a new page in the database will allocate a memory buffer
for it, until the value specified by *cachesize* is reached. Then, these
buffers will be managed using an LRU (least recently used) policy. More
buffers may still be allocated if needed (e.g., if a database contains
big keys and/or values). Allocated cache buffers aren't reclaimed.
* *minkeypage* - Minimum number of keys to store per page. The default value
of 0 is equivalent to 2.
|
||||||
|
|
||||||
|
Returns a BTree object, which implements a dictionary protocol (set
|
||||||
|
of methods), and some additional methods described below.
|
||||||
|
|
||||||
|
Methods
|
||||||
|
-------
|
||||||
|
|
||||||
|
.. method:: btree.close()
|
||||||
|
|
||||||
|
Close the database. It's mandatory to close the database at the end of
processing, as some unwritten data may still be in the cache. Note that
this does not close the underlying stream with which the database was opened;
it should be closed separately (which is also mandatory to make sure that
data is flushed from the buffer to the underlying storage).
|
||||||
|
|
||||||
|
.. method:: btree.flush()
|
||||||
|
|
||||||
|
Flush any data in cache to the underlying stream.
|
||||||
|
|
||||||
|
.. method:: btree.__getitem__(key)
|
||||||
|
btree.get(key, default=None, /)
|
||||||
|
btree.__setitem__(key, val)
|
||||||
|
btree.__delitem__(key)
|
||||||
|
btree.__contains__(key)
|
||||||
|
|
||||||
|
Standard dictionary methods.
|
||||||
|
|
||||||
|
.. method:: btree.__iter__()
|
||||||
|
|
||||||
|
A BTree object can be iterated over directly (similar to a dictionary)
|
||||||
|
to get access to all keys in order.
|
||||||
|
|
||||||
|
.. method:: btree.keys([start_key, [end_key, [flags]]])
|
||||||
|
btree.values([start_key, [end_key, [flags]]])
|
||||||
|
btree.items([start_key, [end_key, [flags]]])
|
||||||
|
|
||||||
|
These methods are similar to the standard dictionary methods, but can also
take optional parameters to iterate over a key sub-range instead of
the entire database. Note that for all three methods, the *start_key* and
*end_key* arguments represent key values. For example, the `values()`
method will iterate over the values corresponding to the key range
given. A *start_key* of None means "from the first key"; a missing
*end_key*, or an *end_key* of None, means "until the end of the database".
By default, the range is inclusive of *start_key* and exclusive of
*end_key*; you can include *end_key* in the iteration by passing *flags*
of `btree.INCL`. You can iterate in descending key order
by passing *flags* of `btree.DESC`. The flag values can be ORed
together.
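For example, a short sketch of sub-range iteration (this assumes a ``db``
opened as in the example above, and that ``None`` start/end keys are accepted
as described)::

    # keys from b"1" up to, but not including, b"3"
    for key in db.keys(b"1", b"3"):
        print(key)                        # b'1', b'2'

    # include the end key by passing btree.INCL
    for key, value in db.items(b"1", b"3", btree.INCL):
        print(key, value)                 # ... up to and including b'3'

    # iterate over all keys in descending order
    for key in db.keys(None, None, btree.DESC):
        print(key)                        # b'3', b'2', b'1'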
|
||||||
|
|
||||||
|
Constants
|
||||||
|
---------
|
||||||
|
|
||||||
|
.. data:: INCL
|
||||||
|
|
||||||
|
A flag for `keys()`, `values()`, `items()` methods to specify that
|
||||||
|
scanning should be inclusive of the end key.
|
||||||
|
|
||||||
|
.. data:: DESC
|
||||||
|
|
||||||
|
A flag for `keys()`, `values()`, `items()` methods to specify that
|
||||||
|
scanning should be in descending direction of keys.
|
@ -1,24 +1,14 @@
|
|||||||
:mod:`builtins` -- builtin functions and exceptions
|
:mod:`builtins` -- builtin functions and exceptions
|
||||||
===================================================
|
===================================================
|
||||||
|
|
||||||
.. module:: builtins
|
|
||||||
:synopsis: builtin Python functions
|
|
||||||
|
|
||||||
All builtin functions and exceptions are described here. They are also
|
All builtin functions and exceptions are described here. They are also
|
||||||
available via the ``builtins`` module.
|
available via ``builtins`` module.
|
||||||
|
|
||||||
For more information about built-ins, see the following CPython documentation:
|
|
||||||
|
|
||||||
* `Builtin CPython Functions <https://docs.python.org/3/library/functions.html>`_
|
|
||||||
* `Builtin CPython Exceptions <https://docs.python.org/3/library/exceptions.html>`_
|
|
||||||
* `Builtin CPython Constants <https://docs.python.org/3/library/constants.html>`_
|
|
||||||
|
|
||||||
.. note:: Not all of these functions, types, exceptions, and constants are turned
|
|
||||||
on in all CircuitPython ports, for space reasons.
|
|
||||||
|
|
||||||
Functions and types
|
Functions and types
|
||||||
-------------------
|
-------------------
|
||||||
|
|
||||||
|
Not all of these functions and types are turned on in all CircuitPython ports, for space reasons.
|
||||||
|
|
||||||
.. function:: abs()
|
.. function:: abs()
|
||||||
|
|
||||||
.. function:: all()
|
.. function:: all()
|
||||||
@ -33,7 +23,7 @@ Functions and types
|
|||||||
|
|
||||||
.. class:: bytes()
|
.. class:: bytes()
|
||||||
|
|
||||||
|see_cpython| `python:bytes`.
|
|see_cpython| `bytes`.
|
||||||
|
|
||||||
.. function:: callable()
|
.. function:: callable()
|
||||||
|
|
||||||
@ -68,7 +58,7 @@ Functions and types
|
|||||||
|
|
||||||
.. class:: frozenset()
|
.. class:: frozenset()
|
||||||
|
|
||||||
`frozenset()` is not enabled on the smallest CircuitPython boards for space reasons.
|
`frozenset()` is not enabled on non-Express CircuitPython boards.
|
||||||
|
|
||||||
.. function:: getattr()
|
.. function:: getattr()
|
||||||
|
|
||||||
@ -88,12 +78,12 @@ Functions and types
|
|||||||
|
|
||||||
.. classmethod:: from_bytes(bytes, byteorder)
|
.. classmethod:: from_bytes(bytes, byteorder)
|
||||||
|
|
||||||
In CircuitPython, the ``byteorder`` parameter must be positional (this is
|
In CircuitPython, ``byteorder`` parameter must be positional (this is
|
||||||
compatible with CPython).
|
compatible with CPython).
|
||||||
|
|
||||||
.. method:: to_bytes(size, byteorder)
|
.. method:: to_bytes(size, byteorder)
|
||||||
|
|
||||||
In CircuitPython, the ``byteorder`` parameter must be positional (this is
|
In CircuitPython, ``byteorder`` parameter must be positional (this is
|
||||||
compatible with CPython).
|
compatible with CPython).
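A short illustrative example, with ``byteorder`` passed positionally::

    value = int.from_bytes(b"\x12\x34", "big")   # 4660 (0x1234)
    data = (4660).to_bytes(2, "big")             # b'\x12\x34'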
|
||||||
|
|
||||||
.. function:: isinstance()
|
.. function:: isinstance()
|
||||||
@ -138,7 +128,7 @@ Functions and types
|
|||||||
|
|
||||||
.. function:: reversed()
|
.. function:: reversed()
|
||||||
|
|
||||||
`reversed()` is not enabled on the smallest CircuitPython boards for space reasons.
|
`reversed()` is not enabled on non-Express CircuitPython boards.
|
||||||
|
|
||||||
.. function:: round()
|
.. function:: round()
|
||||||
|
|
||||||
@ -170,34 +160,20 @@ Functions and types
|
|||||||
Exceptions
|
Exceptions
|
||||||
----------
|
----------
|
||||||
|
|
||||||
.. exception:: ArithmeticError
|
|
||||||
|
|
||||||
.. exception:: AssertionError
|
.. exception:: AssertionError
|
||||||
|
|
||||||
.. exception:: AttributeError
|
.. exception:: AttributeError
|
||||||
|
|
||||||
.. exception:: BaseException
|
|
||||||
|
|
||||||
.. exception:: BrokenPipeError
|
|
||||||
|
|
||||||
.. exception:: ConnectionError
|
|
||||||
|
|
||||||
.. exception:: EOFError
|
|
||||||
|
|
||||||
.. exception:: Exception
|
.. exception:: Exception
|
||||||
|
|
||||||
.. exception:: ImportError
|
.. exception:: ImportError
|
||||||
|
|
||||||
.. exception:: IndentationError
|
|
||||||
|
|
||||||
.. exception:: IndexError
|
.. exception:: IndexError
|
||||||
|
|
||||||
.. exception:: KeyboardInterrupt
|
.. exception:: KeyboardInterrupt
|
||||||
|
|
||||||
.. exception:: KeyError
|
.. exception:: KeyError
|
||||||
|
|
||||||
.. exception:: LookupError
|
|
||||||
|
|
||||||
.. exception:: MemoryError
|
.. exception:: MemoryError
|
||||||
|
|
||||||
.. exception:: NameError
|
.. exception:: NameError
|
||||||
@ -206,41 +182,24 @@ Exceptions
|
|||||||
|
|
||||||
.. exception:: OSError
|
.. exception:: OSError
|
||||||
|
|
||||||
.. exception:: OverflowError
|
|
||||||
|
|
||||||
.. exception:: RuntimeError
|
.. exception:: RuntimeError
|
||||||
|
|
||||||
.. exception:: ReloadException
|
.. exception:: ReloadException
|
||||||
|
|
||||||
`ReloadException` is used internally to deal with soft restarts.
|
`ReloadException` is used internally to deal with soft restarts.
|
||||||
|
|
||||||
Not a part of the CPython standard library
|
|
||||||
|
|
||||||
.. exception:: StopAsyncIteration
|
|
||||||
|
|
||||||
.. exception:: StopIteration
|
.. exception:: StopIteration
|
||||||
|
|
||||||
.. exception:: SyntaxError
|
.. exception:: SyntaxError
|
||||||
|
|
||||||
.. exception:: SystemExit
|
.. exception:: SystemExit
|
||||||
|
|
||||||
|see_cpython| `python:SystemExit`.
|
|see_cpython| :py:class:`cpython:SystemExit`.
|
||||||
|
|
||||||
.. exception:: TimeoutError
|
|
||||||
|
|
||||||
.. exception:: TypeError
|
.. exception:: TypeError
|
||||||
|
|
||||||
|see_cpython| `python:TypeError`.
|
|see_cpython| :py:class:`cpython:TypeError`.
|
||||||
|
|
||||||
.. exception:: UnicodeError
|
|
||||||
|
|
||||||
.. exception:: ValueError
|
.. exception:: ValueError
|
||||||
|
|
||||||
.. exception:: ZeroDivisionError
|
.. exception:: ZeroDivisionError
|
||||||
|
|
||||||
Constants
|
|
||||||
---------
|
|
||||||
|
|
||||||
.. data:: Ellipsis
|
|
||||||
|
|
||||||
.. data:: NotImplemented
|
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
:mod:`collections` -- collection and container types
|
:mod:`collections` -- collection and container types
|
||||||
====================================================
|
=====================================================
|
||||||
|
|
||||||
**Limitations:** Not implemented on the smallest CircuitPython boards for space reasons.
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
.. module:: collections
|
.. module:: collections
|
||||||
:synopsis: collection and container types
|
:synopsis: collection and container types
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:collections`.
|
|see_cpython_module| :mod:`cpython:collections`.
|
||||||
|
|
||||||
This module implements advanced collection and container types to
|
This module implements advanced collection and container types to
|
||||||
hold/accumulate various objects.
|
hold/accumulate various objects.
|
||||||
@ -14,7 +14,7 @@ hold/accumulate various objects.
|
|||||||
Classes
|
Classes
|
||||||
-------
|
-------
|
||||||
|
|
||||||
.. class:: deque(iterable, maxlen[, flags])
|
.. function:: deque(iterable, maxlen[, flags])
|
||||||
|
|
||||||
Deques (double-ended queues) are a list-like container that support O(1)
|
Deques (double-ended queues) are a list-like container that support O(1)
|
||||||
appends and pops from either side of the deque. New deques are created
|
appends and pops from either side of the deque. New deques are created
|
||||||
@ -28,7 +28,7 @@ Classes
|
|||||||
|
|
||||||
- The optional *flags* can be 1 to check for overflow when adding items.
|
- The optional *flags* can be 1 to check for overflow when adding items.
|
||||||
|
|
||||||
As well as supporting ``bool`` and ``len``, deque objects have the following
|
As well as supporting `bool` and `len`, deque objects have the following
|
||||||
methods:
|
methods:
|
||||||
|
|
||||||
.. method:: deque.append(x)
|
.. method:: deque.append(x)
|
||||||
@ -59,7 +59,7 @@ Classes
|
|||||||
print(t1.name)
|
print(t1.name)
|
||||||
assert t2.name == t2[1]
|
assert t2.name == t2[1]
|
||||||
|
|
||||||
.. class:: OrderedDict(...)
|
.. function:: OrderedDict(...)
|
||||||
|
|
||||||
``dict`` type subclass which remembers and preserves the order of keys
|
``dict`` type subclass which remembers and preserves the order of keys
|
||||||
added. When ordered dict is iterated over, keys/items are returned in
|
added. When ordered dict is iterated over, keys/items are returned in
|
||||||
|
@ -4,10 +4,10 @@
|
|||||||
.. module:: errno
|
.. module:: errno
|
||||||
:synopsis: system error codes
|
:synopsis: system error codes
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:errno`.
|
|see_cpython_module| :mod:`cpython:errno`.
|
||||||
|
|
||||||
This module provides access to symbolic error codes for `OSError` exception.
|
This module provides access to symbolic error codes for `OSError` exception.
|
||||||
The codes available may vary per CircuitPython build.
|
A particular inventory of codes depends on :term:`MicroPython port`.
|
||||||
|
|
||||||
Constants
|
Constants
|
||||||
---------
|
---------
|
||||||
@ -15,13 +15,14 @@ Constants
|
|||||||
.. data:: EEXIST, EAGAIN, etc.
|
.. data:: EEXIST, EAGAIN, etc.
|
||||||
|
|
||||||
Error codes, based on ANSI C/POSIX standard. All error codes start with
|
Error codes, based on ANSI C/POSIX standard. All error codes start with
|
||||||
"E". Errors are usually accessible as ``exc.errno``
|
"E". As mentioned above, inventory of the codes depends on
|
||||||
|
:term:`MicroPython port`. Errors are usually accessible as ``exc.args[0]``
|
||||||
where ``exc`` is an instance of `OSError`. Usage example::
|
where ``exc`` is an instance of `OSError`. Usage example::
|
||||||
|
|
||||||
try:
|
try:
|
||||||
os.mkdir("my_dir")
|
os.mkdir("my_dir")
|
||||||
except OSError as exc:
|
except OSError as exc:
|
||||||
if exc.errno == errno.EEXIST:
|
if exc.args[0] == errno.EEXIST:
|
||||||
print("Directory already exists")
|
print("Directory already exists")
|
||||||
|
|
||||||
.. data:: errorcode
|
.. data:: errorcode
|
||||||
|
170
docs/library/framebuf.rst
Normal file
@ -0,0 +1,170 @@
|
|||||||
|
:mod:`framebuf` --- frame buffer manipulation
|
||||||
|
=============================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
|
.. module:: framebuf
|
||||||
|
:synopsis: Frame buffer manipulation
|
||||||
|
|
||||||
|
This module provides a general frame buffer which can be used to create
|
||||||
|
bitmap images, which can then be sent to a display.
|
||||||
|
|
||||||
|
class FrameBuffer
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
The FrameBuffer class provides a pixel buffer which can be drawn upon with
|
||||||
|
pixels, lines, rectangles, text and even other FrameBuffers. It is useful
|
||||||
|
when generating output for displays.
|
||||||
|
|
||||||
|
For example::
|
||||||
|
|
||||||
|
import framebuf
|
||||||
|
|
||||||
|
# FrameBuffer needs 2 bytes for every RGB565 pixel
|
||||||
|
fbuf = framebuf.FrameBuffer(bytearray(10 * 100 * 2), 10, 100, framebuf.RGB565)
|
||||||
|
|
||||||
|
fbuf.fill(0)
|
||||||
|
fbuf.text('MicroPython!', 0, 0, 0xffff)
|
||||||
|
fbuf.hline(0, 10, 96, 0xffff)
|
||||||
|
|
||||||
|
Constructors
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. class:: FrameBuffer(buffer, width, height, format, stride=width, /)
|
||||||
|
|
||||||
|
Construct a FrameBuffer object. The parameters are:
|
||||||
|
|
||||||
|
- *buffer* is an object with a buffer protocol which must be large
|
||||||
|
enough to contain every pixel defined by the width, height and
|
||||||
|
format of the FrameBuffer.
|
||||||
|
- *width* is the width of the FrameBuffer in pixels
|
||||||
|
- *height* is the height of the FrameBuffer in pixels
|
||||||
|
- *format* specifies the type of pixel used in the FrameBuffer;
|
||||||
|
permissible values are listed under Constants below. These set the
|
||||||
|
number of bits used to encode a color value and the layout of these
|
||||||
|
bits in *buffer*.
|
||||||
|
Where a color value c is passed to a method, c is a small integer
|
||||||
|
with an encoding that is dependent on the format of the FrameBuffer.
|
||||||
|
- *stride* is the number of pixels between each horizontal line
|
||||||
|
of pixels in the FrameBuffer. This defaults to *width* but may
|
||||||
|
need adjustments when implementing a FrameBuffer within another
|
||||||
|
larger FrameBuffer or screen. The *buffer* size must accommodate
|
||||||
|
an increased step size.
|
||||||
|
|
||||||
|
One must specify valid *buffer*, *width*, *height*, *format* and
|
||||||
|
optionally *stride*. Invalid *buffer* size or dimensions may lead to
|
||||||
|
unexpected errors.
|
||||||
|
|
||||||
|
Drawing primitive shapes
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
The following methods draw shapes onto the FrameBuffer.
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.fill(c)
|
||||||
|
|
||||||
|
Fill the entire FrameBuffer with the specified color.
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.pixel(x, y[, c])
|
||||||
|
|
||||||
|
If *c* is not given, get the color value of the specified pixel.
|
||||||
|
If *c* is given, set the specified pixel to the given color.
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.hline(x, y, w, c)
|
||||||
|
.. method:: FrameBuffer.vline(x, y, h, c)
|
||||||
|
.. method:: FrameBuffer.line(x1, y1, x2, y2, c)
|
||||||
|
|
||||||
|
Draw a line from a set of coordinates using the given color and
|
||||||
|
a thickness of 1 pixel. The `line` method draws the line up to
|
||||||
|
a second set of coordinates whereas the `hline` and `vline`
|
||||||
|
methods draw horizontal and vertical lines respectively up to
|
||||||
|
a given length.
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.rect(x, y, w, h, c)
|
||||||
|
.. method:: FrameBuffer.fill_rect(x, y, w, h, c)
|
||||||
|
|
||||||
|
Draw a rectangle at the given location, size and color. The `rect`
|
||||||
|
method draws only a 1 pixel outline whereas the `fill_rect` method
|
||||||
|
draws both the outline and interior.
|
||||||
|
|
||||||
|
Drawing text
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.text(s, x, y[, c])
|
||||||
|
|
||||||
|
Write text to the FrameBuffer using the coordinates as the upper-left
|
||||||
|
corner of the text. The color of the text can be defined by the optional
|
||||||
|
argument but is otherwise a default value of 1. All characters have
|
||||||
|
dimensions of 8x8 pixels and there is currently no way to change the font.
|
||||||
|
|
||||||
|
|
||||||
|
Other methods
|
||||||
|
-------------
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.scroll(xstep, ystep)
|
||||||
|
|
||||||
|
Shift the contents of the FrameBuffer by the given vector. This may
|
||||||
|
leave a footprint of the previous colors in the FrameBuffer.
|
||||||
|
|
||||||
|
.. method:: FrameBuffer.blit(fbuf, x, y, key=-1, palette=None)
|
||||||
|
|
||||||
|
Draw another FrameBuffer on top of the current one at the given coordinates.
|
||||||
|
If *key* is specified then it should be a color integer and the
|
||||||
|
corresponding color will be considered transparent: all pixels with that
|
||||||
|
color value will not be drawn.
|
||||||
|
|
||||||
|
The *palette* argument enables blitting between FrameBuffers with differing
|
||||||
|
formats. Typical usage is to render a monochrome or grayscale glyph/icon to
|
||||||
|
a color display. The *palette* is a FrameBuffer instance whose format is
|
||||||
|
that of the current FrameBuffer. The *palette* height is one pixel and its
|
||||||
|
pixel width is the number of colors in the source FrameBuffer. The *palette*
|
||||||
|
for an N-bit source needs 2**N pixels; the *palette* for a monochrome source
|
||||||
|
would have 2 pixels representing background and foreground colors. The
|
||||||
|
application assigns a color to each pixel in the *palette*. The color of the
current pixel will be that of the *palette* pixel whose x position is the
color value of the corresponding source pixel.
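A hypothetical sketch of blitting a monochrome glyph onto an RGB565 buffer
(the sizes and colors below are illustrative, not required values)::

    import framebuf

    # destination: a 32x32 RGB565 buffer (2 bytes per pixel)
    dest = framebuf.FrameBuffer(bytearray(32 * 32 * 2), 32, 32, framebuf.RGB565)

    # source: an 8x8 monochrome glyph
    glyph = framebuf.FrameBuffer(bytearray(8), 8, 8, framebuf.MONO_HLSB)
    glyph.text("A", 0, 0, 1)

    # palette: one pixel per source color, in the destination's format
    palette = framebuf.FrameBuffer(bytearray(2 * 2), 2, 1, framebuf.RGB565)
    palette.pixel(0, 0, 0x0000)   # source color 0 -> black
    palette.pixel(1, 0, 0xFFFF)   # source color 1 -> white

    dest.blit(glyph, 0, 0, -1, palette)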
|
||||||
|
|
||||||
|
Constants
|
||||||
|
---------
|
||||||
|
|
||||||
|
.. data:: framebuf.MONO_VLSB
|
||||||
|
|
||||||
|
Monochrome (1-bit) color format
|
||||||
|
This defines a mapping where the bits in a byte are vertically mapped with
|
||||||
|
bit 0 being nearest the top of the screen. Consequently each byte occupies
|
||||||
|
8 vertical pixels. Subsequent bytes appear at successive horizontal
|
||||||
|
locations until the rightmost edge is reached. Further bytes are rendered
|
||||||
|
at locations starting at the leftmost edge, 8 pixels lower.
|
||||||
|
|
||||||
|
.. data:: framebuf.MONO_HLSB
|
||||||
|
|
||||||
|
Monochrome (1-bit) color format
|
||||||
|
This defines a mapping where the bits in a byte are horizontally mapped.
|
||||||
|
Each byte occupies 8 horizontal pixels with bit 7 being the leftmost.
|
||||||
|
Subsequent bytes appear at successive horizontal locations until the
|
||||||
|
rightmost edge is reached. Further bytes are rendered on the next row, one
|
||||||
|
pixel lower.
|
||||||
|
|
||||||
|
.. data:: framebuf.MONO_HMSB
|
||||||
|
|
||||||
|
Monochrome (1-bit) color format
|
||||||
|
This defines a mapping where the bits in a byte are horizontally mapped.
|
||||||
|
Each byte occupies 8 horizontal pixels with bit 0 being the leftmost.
|
||||||
|
Subsequent bytes appear at successive horizontal locations until the
|
||||||
|
rightmost edge is reached. Further bytes are rendered on the next row, one
|
||||||
|
pixel lower.
|
||||||
|
|
||||||
|
.. data:: framebuf.RGB565
|
||||||
|
|
||||||
|
Red Green Blue (16-bit, 5+6+5) color format
|
||||||
|
|
||||||
|
.. data:: framebuf.GS2_HMSB
|
||||||
|
|
||||||
|
Grayscale (2-bit) color format
|
||||||
|
|
||||||
|
.. data:: framebuf.GS4_HMSB
|
||||||
|
|
||||||
|
Grayscale (4-bit) color format
|
||||||
|
|
||||||
|
.. data:: framebuf.GS8
|
||||||
|
|
||||||
|
Grayscale (8-bit) color format
|
@ -1,10 +1,12 @@
|
|||||||
:mod:`gc` -- control the garbage collector
|
:mod:`gc` -- control the garbage collector
|
||||||
==========================================
|
==========================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
.. module:: gc
|
.. module:: gc
|
||||||
:synopsis: control the garbage collector
|
:synopsis: control the garbage collector
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:gc`.
|
|see_cpython_module| :mod:`cpython:gc`.
|
||||||
|
|
||||||
Functions
|
Functions
|
||||||
---------
|
---------
|
||||||
@ -24,7 +26,7 @@ Functions
|
|||||||
|
|
||||||
.. function:: mem_alloc()
|
.. function:: mem_alloc()
|
||||||
|
|
||||||
Return the number of bytes of heap RAM that are allocated by Python code.
|
Return the number of bytes of heap RAM that are allocated.
|
||||||
|
|
||||||
.. admonition:: Difference to CPython
|
.. admonition:: Difference to CPython
|
||||||
:class: attention
|
:class: attention
|
||||||
@ -33,8 +35,8 @@ Functions
|
|||||||
|
|
||||||
.. function:: mem_free()
|
.. function:: mem_free()
|
||||||
|
|
||||||
Return the number of bytes of heap RAM that is available for Python
|
Return the number of bytes of available heap RAM, or -1 if this amount
|
||||||
code to allocate, or -1 if this amount is not known.
|
is not known.
|
||||||
|
|
||||||
.. admonition:: Difference to CPython
|
.. admonition:: Difference to CPython
|
||||||
:class: attention
|
:class: attention
|
||||||
@ -61,6 +63,6 @@ Functions
|
|||||||
.. admonition:: Difference to CPython
|
.. admonition:: Difference to CPython
|
||||||
:class: attention
|
:class: attention
|
||||||
|
|
||||||
This function is a MicroPython extension. CPython has a similar
|
This function is a a MicroPython extension. CPython has a similar
|
||||||
function - ``set_threshold()``, but due to different GC
|
function - ``set_threshold()``, but due to different GC
|
||||||
implementations, its signature and semantics are different.
|
implementations, its signature and semantics are different.
|
||||||
|
59
docs/library/hashlib.rst
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
:mod:`hashlib` -- hashing algorithms
|
||||||
|
=====================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
|
.. module:: hashlib
|
||||||
|
:synopsis: hashing algorithms
|
||||||
|
|
||||||
|
|see_cpython_module| :mod:`cpython:hashlib`.
|
||||||
|
|
||||||
|
This module implements binary data hashing algorithms. The exact inventory
|
||||||
|
of available algorithms depends on the board. Among the algorithms which may
be implemented are:
|
||||||
|
|
||||||
|
* SHA256 - The current generation, modern hashing algorithm (of SHA2 series).
|
||||||
|
It is suitable for cryptographically-secure purposes. It is included in the
MicroPython core, and any board is recommended to provide this unless
it has particular code-size constraints.
|
||||||
|
|
||||||
|
* SHA1 - A previous generation algorithm. Not recommended for new usages,
|
||||||
|
but SHA1 is a part of a number of Internet standards and existing
|
||||||
|
applications, so boards targeting network connectivity and
|
||||||
|
interoperability will try to provide this.
|
||||||
|
|
||||||
|
* MD5 - A legacy algorithm, not considered cryptographically secure. Only
|
||||||
|
selected boards, targeting interoperability with legacy applications,
|
||||||
|
will offer this.
|
||||||
|
|
||||||
|
Constructors
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. class:: hashlib.sha256([data])
|
||||||
|
|
||||||
|
Create an SHA256 hasher object and optionally feed ``data`` into it.
|
||||||
|
|
||||||
|
.. class:: hashlib.sha1([data])
|
||||||
|
|
||||||
|
Create an SHA1 hasher object and optionally feed ``data`` into it.
|
||||||
|
|
||||||
|
.. class:: hashlib.md5([data])
|
||||||
|
|
||||||
|
Create an MD5 hasher object and optionally feed ``data`` into it.
|
||||||
|
|
||||||
|
Methods
|
||||||
|
-------
|
||||||
|
|
||||||
|
.. method:: hash.update(data)
|
||||||
|
|
||||||
|
Feed more binary data into hash.
|
||||||
|
|
||||||
|
.. method:: hash.digest()
|
||||||
|
|
||||||
|
Return the hash of all data passed through the hasher, as a bytes object. After this
|
||||||
|
method is called, more data cannot be fed into the hash any longer.
|
||||||
|
|
||||||
|
.. method:: hash.hexdigest()
|
||||||
|
|
||||||
|
This method is NOT implemented. Use ``binascii.hexlify(hash.digest())``
|
||||||
|
to achieve a similar effect.
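A brief usage sketch, assuming SHA256 is available on the board::

    import hashlib
    import binascii

    h = hashlib.sha256()
    h.update(b"hello ")
    h.update(b"world")
    print(binascii.hexlify(h.digest()))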
|
@ -6,7 +6,7 @@
|
|||||||
.. module:: heapq
|
.. module:: heapq
|
||||||
:synopsis: heap queue algorithm
|
:synopsis: heap queue algorithm
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:heapq`.
|
|see_cpython_module| :mod:`cpython:heapq`.
|
||||||
|
|
||||||
This module implements the
|
This module implements the
|
||||||
`min heap queue algorithm <https://en.wikipedia.org/wiki/Heap_%28data_structure%29>`_.
|
`min heap queue algorithm <https://en.wikipedia.org/wiki/Heap_%28data_structure%29>`_.
|
||||||
|
@ -1,23 +1,25 @@
|
|||||||
.. _micropython_lib:
|
.. _micropython_lib:
|
||||||
|
|
||||||
Standard Libraries
|
MicroPython libraries
|
||||||
==================
|
=====================
|
||||||
|
|
||||||
Python standard libraries
|
Python standard libraries and micro-libraries
|
||||||
-------------------------
|
---------------------------------------------
|
||||||
|
|
||||||
The libraries below implement a subset of the corresponding
|
The libraries below are inherited from MicroPython.
|
||||||
standard Python (CPython) library. They are implemented in C, not Python.
|
They are similar to the standard Python libraries with the same name.
|
||||||
|
They implement a subset of or a variant of the corresponding
|
||||||
|
standard Python library.
|
||||||
|
|
||||||
CircuitPython's long-term goal is that code written in CircuitPython
|
CircuitPython's long-term goal is that code written in CircuitPython
|
||||||
using Python standard libraries will be runnable on CPython without changes.
|
using Python standard libraries will be runnable on CPython without changes.
|
||||||
|
|
||||||
These libraries are not enabled on CircuitPython builds with
|
These libraries are not enabled on CircuitPython builds with
|
||||||
limited flash memory:
|
limited flash memory, usually on non-Express builds:
|
||||||
``binascii``, ``errno``, ``json``, ``re``.
|
``binascii``, ``errno``, ``json``, ``re``.
|
||||||
|
|
||||||
These libraries are not currently enabled in any CircuitPython build, but may be in the future:
|
These libraries are not currently enabled in any CircuitPython build, but may be in the future:
|
||||||
``ctypes``, ``platform``
|
``ctypes``, ``hashlib``, ``zlib``.
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
@ -29,19 +31,21 @@ These libraries are not currently enabled in any CircuitPython build, but may be
|
|||||||
collections.rst
|
collections.rst
|
||||||
errno.rst
|
errno.rst
|
||||||
gc.rst
|
gc.rst
|
||||||
|
hashlib.rst
|
||||||
io.rst
|
io.rst
|
||||||
json.rst
|
json.rst
|
||||||
platform.rst
|
|
||||||
re.rst
|
re.rst
|
||||||
sys.rst
|
sys.rst
|
||||||
|
asyncio.rst
|
||||||
ctypes.rst
|
ctypes.rst
|
||||||
select.rst
|
select.rst
|
||||||
|
zlib.rst
|
||||||
|
|
||||||
Omitted ``string`` functions
|
Omitted functions in the ``string`` library
|
||||||
----------------------------
|
-------------------------------------------
|
||||||
|
|
||||||
A few string operations are not enabled on small builds
|
A few string operations are not enabled on small builds
|
||||||
due to limited flash memory:
|
(usually non-Express), due to limited flash memory:
|
||||||
``string.center()``, ``string.partition()``, ``string.splitlines()``,
|
``string.center()``, ``string.partition()``, ``string.splitlines()``,
|
||||||
``string.reversed()``.
|
``string.reversed()``.
|
||||||
|
|
||||||
@ -49,10 +53,13 @@ due to limited flash memory:
|
|||||||
CircuitPython/MicroPython-specific libraries
|
CircuitPython/MicroPython-specific libraries
|
||||||
--------------------------------------------
|
--------------------------------------------
|
||||||
|
|
||||||
Functionality specific to the CircuitPython/MicroPython implementations is available in
|
Functionality specific to the CircuitPython/MicroPython implementation is available in
|
||||||
the following libraries.
|
the following libraries. These libraries may change significantly or be removed in future
|
||||||
|
versions of CircuitPython.
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
|
btree.rst
|
||||||
|
framebuf.rst
|
||||||
micropython.rst
|
micropython.rst
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
:mod:`io` -- input/output streams
|
:mod:`io` -- input/output streams
|
||||||
=================================
|
==================================
|
||||||
|
|
||||||
.. module:: io
|
.. module:: io
|
||||||
:synopsis: input/output streams
|
:synopsis: input/output streams
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:io`.
|
|see_cpython_module| :mod:`cpython:io`.
|
||||||
|
|
||||||
This module contains additional types of ``stream`` (file-like) objects
|
This module contains additional types of ``stream`` (file-like) objects
|
||||||
and helper functions.
|
and helper functions.
|
||||||
@ -112,20 +112,3 @@ Classes
|
|||||||
.. method:: getvalue()
|
.. method:: getvalue()
|
||||||
|
|
||||||
Get the current contents of the underlying buffer which holds data.
|
Get the current contents of the underlying buffer which holds data.
|
||||||
|
|
||||||
.. class:: StringIO(alloc_size)
|
|
||||||
:noindex:
|
|
||||||
.. class:: BytesIO(alloc_size)
|
|
||||||
:noindex:
|
|
||||||
|
|
||||||
Create an empty `StringIO`/`BytesIO` object, preallocated to hold up
|
|
||||||
to *alloc_size* number of bytes. That means that writing that amount
|
|
||||||
of bytes won't lead to reallocation of the buffer, and thus won't hit an
out-of-memory situation or lead to memory fragmentation. These constructors
|
|
||||||
are a MicroPython extension and are recommended for usage only in special
|
|
||||||
cases and in system-level libraries, not for end-user applications.
|
|
||||||
|
|
||||||
.. admonition:: Difference to CPython
|
|
||||||
:class: attention
|
|
||||||
|
|
||||||
These constructors are a MicroPython extension.
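A minimal sketch of the preallocating constructors described above::

    import io

    buf = io.BytesIO(64)      # preallocate space for up to 64 bytes
    buf.write(b"no reallocation up to 64 bytes")
    print(buf.getvalue())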
|
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
:mod:`json` -- JSON encoding and decoding
|
:mod:`json` -- JSON encoding and decoding
|
||||||
=========================================
|
==========================================
|
||||||
|
|
||||||
.. module:: json
|
.. module:: json
|
||||||
:synopsis: JSON encoding and decoding
|
:synopsis: JSON encoding and decoding
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:json`.
|
|see_cpython_module| :mod:`cpython:json`.
|
||||||
|
|
||||||
This module allows conversion between Python objects and the JSON
This module allows conversion between Python objects and the JSON
|
||||||
data format.
|
data format.
|
||||||
@ -12,20 +12,14 @@ data format.
|
|||||||
Functions
|
Functions
|
||||||
---------
|
---------
|
||||||
|
|
||||||
.. function:: dump(obj, stream, separators=None)
|
.. function:: dump(obj, stream)
|
||||||
|
|
||||||
Serialise ``obj`` to a JSON string, writing it to the given *stream*.
|
Serialise ``obj`` to a JSON string, writing it to the given *stream*.
|
||||||
|
|
||||||
If specified, separators should be an ``(item_separator, key_separator)``
|
.. function:: dumps(obj)
|
||||||
tuple. The default is ``(', ', ': ')``. To get the most compact JSON
|
|
||||||
representation, you should specify ``(',', ':')`` to eliminate whitespace.
|
|
||||||
|
|
||||||
.. function:: dumps(obj, separators=None)
|
|
||||||
|
|
||||||
Return ``obj`` represented as a JSON string.
|
Return ``obj`` represented as a JSON string.
|
||||||
|
|
||||||
The arguments have the same meaning as in `dump`.
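For example, to produce compact output with the *separators* parameter shown
above::

    import json

    obj = {"answer": 42, "items": [1, 2, 3]}
    print(json.dumps(obj))                          # default separators (', ', ': ')
    print(json.dumps(obj, separators=(",", ":")))   # most compact form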
|
|
||||||
|
|
||||||
.. function:: load(stream)
|
.. function:: load(stream)
|
||||||
|
|
||||||
Parse the given ``stream``, interpreting it as a JSON string and
|
Parse the given ``stream``, interpreting it as a JSON string and
|
||||||
|
@ -1,5 +1,7 @@
|
|||||||
:mod:`micropython` -- MicroPython extensions and internals
|
:mod:`micropython` -- access and control MicroPython internals
|
||||||
==========================================================
|
==============================================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
.. module:: micropython
|
.. module:: micropython
|
||||||
:synopsis: access and control MicroPython internals
|
:synopsis: access and control MicroPython internals
|
||||||
@ -9,7 +11,7 @@ Functions
|
|||||||
|
|
||||||
.. function:: const(expr)
|
.. function:: const(expr)
|
||||||
|
|
||||||
Used to declare that the expression is a constant so that the compiler can
|
Used to declare that the expression is a constant so that the compile can
|
||||||
optimise it. The use of this function should be as follows::
|
optimise it. The use of this function should be as follows::
|
||||||
|
|
||||||
from micropython import const
|
from micropython import const
|
||||||
@ -26,3 +28,111 @@ Functions
|
|||||||
provided as part of the :mod:`micropython` module mainly so that scripts can be
|
provided as part of the :mod:`micropython` module mainly so that scripts can be
|
||||||
written which run under both CPython and MicroPython, by following the above
|
written which run under both CPython and MicroPython, by following the above
|
||||||
pattern.
|
pattern.
|
||||||
|
|
||||||
|
.. function:: opt_level([level])
|
||||||
|
|
||||||
|
If *level* is given then this function sets the optimisation level for subsequent
|
||||||
|
compilation of scripts, and returns ``None``. Otherwise it returns the current
|
||||||
|
optimisation level.
|
||||||
|
|
||||||
|
The optimisation level controls the following compilation features:
|
||||||
|
|
||||||
|
- Assertions: at level 0 assertion statements are enabled and compiled into the
|
||||||
|
bytecode; at levels 1 and higher assertions are not compiled.
|
||||||
|
- Built-in ``__debug__`` variable: at level 0 this variable expands to ``True``;
|
||||||
|
at levels 1 and higher it expands to ``False``.
|
||||||
|
- Source-code line numbers: at levels 0, 1 and 2 source-code line number are
|
||||||
|
stored along with the bytecode so that exceptions can report the line number
|
||||||
|
they occurred at; at levels 3 and higher line numbers are not stored.
|
||||||
|
|
||||||
|
The default optimisation level is usually level 0.
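For example::

    import micropython

    print(micropython.opt_level())   # current level, usually 0
    micropython.opt_level(1)         # code compiled after this has assertions disabled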
|
||||||
|
|
||||||
|
.. function:: mem_info([verbose])
|
||||||
|
|
||||||
|
Print information about currently used memory. If the *verbose* argument
|
||||||
|
is given then extra information is printed.
|
||||||
|
|
||||||
|
The information that is printed is implementation dependent, but currently
|
||||||
|
includes the amount of stack and heap used. In verbose mode it prints out
|
||||||
|
the entire heap indicating which blocks are used and which are free.
|
||||||
|
|
||||||
|
.. function:: qstr_info([verbose])
|
||||||
|
|
||||||
|
Print information about currently interned strings. If the *verbose*
|
||||||
|
argument is given then extra information is printed.
|
||||||
|
|
||||||
|
The information that is printed is implementation dependent, but currently
|
||||||
|
includes the number of interned strings and the amount of RAM they use. In
|
||||||
|
verbose mode it prints out the names of all RAM-interned strings.
|
||||||
|
|
||||||
|
.. function:: stack_use()
|
||||||
|
|
||||||
|
Return an integer representing the current amount of stack that is being
|
||||||
|
used. The absolute value of this is not particularly useful, rather it
|
||||||
|
should be used to compute differences in stack usage at different points.
|
||||||
|
|
||||||
|
.. function:: heap_lock()
|
||||||
|
.. function:: heap_unlock()
|
||||||
|
.. function:: heap_locked()
|
||||||
|
|
||||||
|
Lock or unlock the heap. When locked no memory allocation can occur and a
|
||||||
|
`MemoryError` will be raised if any heap allocation is attempted.
|
||||||
|
`heap_locked()` returns a true value if the heap is currently locked.
|
||||||
|
|
||||||
|
These functions can be nested, i.e. `heap_lock()` can be called multiple times
|
||||||
|
in a row and the lock-depth will increase, and then `heap_unlock()` must be
|
||||||
|
called the same number of times to make the heap available again.
|
||||||
|
|
||||||
|
Both `heap_unlock()` and `heap_locked()` return the current lock depth
|
||||||
|
(after unlocking for the former) as a non-negative integer, with 0 meaning
|
||||||
|
the heap is not locked.
|
||||||
|
|
||||||
|
If the REPL becomes active with the heap locked then it will be forcefully
|
||||||
|
unlocked.
|
||||||
|
|
||||||
|
Note: `heap_locked()` is not enabled on most ports by default,
|
||||||
|
requires ``MICROPY_PY_MICROPYTHON_HEAP_LOCKED``.
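A small sketch of the locking behaviour::

    import micropython

    micropython.heap_lock()
    try:
        buf = bytearray(100)           # allocation fails while the heap is locked
    except MemoryError:
        print("heap is locked")
    finally:
        micropython.heap_unlock()      # returns the remaining lock depth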
|
||||||
|
|
||||||
|
.. function:: kbd_intr(chr)
|
||||||
|
|
||||||
|
Set the character that will raise a `KeyboardInterrupt` exception. By
|
||||||
|
default this is set to 3 during script execution, corresponding to Ctrl-C.
|
||||||
|
Passing -1 to this function will disable capture of Ctrl-C, and passing 3
|
||||||
|
will restore it.
|
||||||
|
|
||||||
|
This function can be used to prevent the capturing of Ctrl-C on the
|
||||||
|
incoming stream of characters that is usually used for the REPL, in case
|
||||||
|
that stream is used for other purposes.
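For example, a sketch of reading raw bytes that may legitimately contain 0x03,
on ports that provide ``sys.stdin.buffer``::

    import micropython
    import sys

    micropython.kbd_intr(-1)              # stop Ctrl-C from raising KeyboardInterrupt
    data = sys.stdin.buffer.read(16)      # raw bytes, 0x03 passes through
    micropython.kbd_intr(3)               # restore the default behaviour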
|
||||||
|
|
||||||
|
.. function:: schedule(func, arg)
|
||||||
|
|
||||||
|
Schedule the function *func* to be executed "very soon". The function
|
||||||
|
is passed the value *arg* as its single argument. "Very soon" means that
|
||||||
|
the MicroPython runtime will do its best to execute the function at the
|
||||||
|
earliest possible time, given that it is also trying to be efficient, and
|
||||||
|
that the following conditions hold:
|
||||||
|
|
||||||
|
- A scheduled function will never preempt another scheduled function.
|
||||||
|
- Scheduled functions are always executed "between opcodes" which means
|
||||||
|
that all fundamental Python operations (such as appending to a list)
|
||||||
|
are guaranteed to be atomic.
|
||||||
|
- A given port may define "critical regions" within which scheduled
|
||||||
|
functions will never be executed. Functions may be scheduled within
|
||||||
|
a critical region but they will not be executed until that region
|
||||||
|
is exited. An example of a critical region is a preempting interrupt
|
||||||
|
handler (an IRQ).
|
||||||
|
|
||||||
|
A use for this function is to schedule a callback from a preempting IRQ.
|
||||||
|
Such an IRQ puts restrictions on the code that runs in the IRQ (for example
|
||||||
|
the heap may be locked) and scheduling a function to call later will lift
|
||||||
|
those restrictions.
|
||||||
|
|
||||||
|
Note: If `schedule()` is called from a preempting IRQ, when memory
|
||||||
|
allocation is not allowed and the callback to be passed to `schedule()` is
|
||||||
|
a bound method, passing this directly will fail. This is because creating a
|
||||||
|
reference to a bound method causes memory allocation. A solution is to
|
||||||
|
create a reference to the method in the class constructor and to pass that
|
||||||
|
reference to `schedule()`.
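A sketch of that workaround (the IRQ source here is purely illustrative)::

    import micropython

    class Sensor:
        def __init__(self):
            # create the bound-method reference once, outside any IRQ
            self._process_ref = self._process

        def _process(self, arg):
            print("processing", arg)

        def on_irq(self, source):
            # no allocation is needed here: the reference already exists
            micropython.schedule(self._process_ref, 42)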
|
||||||
|
|
||||||
|
There is a finite queue to hold the scheduled functions and `schedule()`
|
||||||
|
will raise a `RuntimeError` if the queue is full.
|
||||||
|
@ -1,38 +0,0 @@
|
|||||||
:mod:`platform` -- access to underlying platform’s identifying data
|
|
||||||
===================================================================
|
|
||||||
|
|
||||||
.. module:: platform
|
|
||||||
:synopsis: access to underlying platform’s identifying data
|
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:platform`.
|
|
||||||
|
|
||||||
This module tries to retrieve as much platform-identifying data as possible. It
|
|
||||||
makes this information available via function APIs.
|
|
||||||
|
|
||||||
Functions
|
|
||||||
---------
|
|
||||||
|
|
||||||
.. function:: platform()
|
|
||||||
|
|
||||||
Returns a string identifying the underlying platform. This string is composed
|
|
||||||
of several substrings in the following order, delimited by dashes (``-``):
|
|
||||||
|
|
||||||
- the name of the platform system (e.g. Unix, Windows or MicroPython)
|
|
||||||
- the MicroPython version
|
|
||||||
- the architecture of the platform
|
|
||||||
- the version of the underlying platform
|
|
||||||
- the concatenation of the name of the libc that MicroPython is linked to
|
|
||||||
and its corresponding version.
|
|
||||||
|
|
||||||
For example, this could be
|
|
||||||
``"MicroPython-1.20.0-xtensa-IDFv4.2.4-with-newlib3.0.0"``.
|
|
||||||
|
|
||||||
.. function:: python_compiler()
|
|
||||||
|
|
||||||
Returns a string identifying the compiler used for compiling MicroPython.
|
|
||||||
|
|
||||||
.. function:: libc_ver()
|
|
||||||
|
|
||||||
Returns a tuple of strings *(lib, version)*, where *lib* is the name of the
|
|
||||||
libc that MicroPython is linked to, and *version* the corresponding version
|
|
||||||
of this libc.
|
|
@ -1,10 +1,10 @@
|
|||||||
:mod:`re` -- simple regular expressions
|
:mod:`re` -- simple regular expressions
|
||||||
=======================================
|
========================================
|
||||||
|
|
||||||
.. module:: re
|
.. module:: re
|
||||||
:synopsis: regular expressions
|
:synopsis: regular expressions
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:re`.
|
|see_cpython_module| :mod:`cpython:re`.
|
||||||
|
|
||||||
This module implements regular expression operations. Regular expression
|
This module implements regular expression operations. Regular expression
|
||||||
syntax supported is a subset of CPython ``re`` module (and actually is
|
syntax supported is a subset of CPython ``re`` module (and actually is
|
||||||
|
@ -1,6 +1,8 @@
|
|||||||
:mod:`select` -- wait for events on a set of streams
|
:mod:`select` -- wait for events on a set of streams
|
||||||
====================================================
|
====================================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
.. module:: select
|
.. module:: select
|
||||||
:synopsis: wait for events on a set of streams
|
:synopsis: wait for events on a set of streams
|
||||||
|
|
||||||
@ -84,7 +86,7 @@ Methods
|
|||||||
.. method:: poll.ipoll(timeout=-1, flags=0, /)
|
.. method:: poll.ipoll(timeout=-1, flags=0, /)
|
||||||
|
|
||||||
Like :meth:`poll.poll`, but instead returns an iterator which yields a
|
Like :meth:`poll.poll`, but instead returns an iterator which yields a
|
||||||
"callee-owned tuple". This function provides an efficient, allocation-free
|
``callee-owned tuples``. This function provides efficient, allocation-free
|
||||||
way to poll on streams.
|
way to poll on streams.
|
||||||
|
|
||||||
If *flags* is 1, one-shot behaviour for events is employed: streams for
|
If *flags* is 1, one-shot behaviour for events is employed: streams for
|
||||||
|
@ -1,10 +1,12 @@
|
|||||||
:mod:`sys` -- system specific functions
|
:mod:`sys` -- system specific functions
|
||||||
=======================================
|
========================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
.. module:: sys
|
.. module:: sys
|
||||||
:synopsis: system specific functions
|
:synopsis: system specific functions
|
||||||
|
|
||||||
|see_cpython_module| :mod:`python:sys`.
|
|see_cpython_module| :mod:`cpython:sys`.
|
||||||
|
|
||||||
Functions
|
Functions
|
||||||
---------
|
---------
|
||||||
@ -33,8 +35,6 @@ Constants
|
|||||||
|
|
||||||
* *name* - string "circuitpython"
|
* *name* - string "circuitpython"
|
||||||
* *version* - tuple (major, minor, micro), e.g. (1, 7, 0)
|
* *version* - tuple (major, minor, micro), e.g. (1, 7, 0)
|
||||||
* *_machine* - string describing the underlying machine
|
|
||||||
* *_mpy* - supported mpy file-format version (optional attribute)
|
|
||||||
|
|
||||||
This object is the recommended way to distinguish CircuitPython from other
|
This object is the recommended way to distinguish CircuitPython from other
|
||||||
Python implementations (note that it still may not exist in the very
|
Python implementations (note that it still may not exist in the very
|
||||||
@ -80,14 +80,6 @@ Constants
|
|||||||
|
|
||||||
A mutable list of directories to search for imported modules.
|
A mutable list of directories to search for imported modules.
|
||||||
|
|
||||||
.. admonition:: Difference to CPython
|
|
||||||
:class: attention
|
|
||||||
|
|
||||||
On MicroPython, an entry with the value ``".frozen"`` will indicate that import
|
|
||||||
should search :term:`frozen modules <frozen module>` at that point in the search.
|
|
||||||
If no frozen module is found then search will *not* look for a directory called
|
|
||||||
``.frozen``, instead it will continue with the next entry in ``sys.path``.
|
|
||||||
|
|
||||||
.. data:: platform
|
.. data:: platform
|
||||||
|
|
||||||
The platform that CircuitPython is running on. For OS/RTOS ports, this is
|
The platform that CircuitPython is running on. For OS/RTOS ports, this is
|
||||||
@ -97,12 +89,6 @@ Constants
|
|||||||
If you need to check whether your program runs on CircuitPython (vs other
|
If you need to check whether your program runs on CircuitPython (vs other
|
||||||
Python implementation), use `sys.implementation` instead.
|
Python implementation), use `sys.implementation` instead.
|
||||||
|
|
||||||
.. data:: ps1
|
|
||||||
ps2
|
|
||||||
|
|
||||||
Mutable attributes holding strings, which are used for the REPL prompt. The defaults
|
|
||||||
give the standard Python prompt of ``>>>`` and ``...``.
|
|
||||||
|
|
||||||
.. data:: stderr
|
.. data:: stderr
|
||||||
|
|
||||||
Standard error ``stream``.
|
Standard error ``stream``.
|
||||||
@ -115,14 +101,6 @@ Constants
|
|||||||
|
|
||||||
Standard output ``stream``.
|
Standard output ``stream``.
|
||||||
|
|
||||||
.. data:: tracebacklimit
|
|
||||||
|
|
||||||
A mutable attribute holding an integer value which is the maximum number of traceback
|
|
||||||
entries to store in an exception. Set to 0 to disable adding tracebacks. Defaults
|
|
||||||
to 1000.
|
|
||||||
|
|
||||||
Note: this is not available on all ports.
|
|
||||||
|
|
||||||
.. data:: version
|
.. data:: version
|
||||||
|
|
||||||
Python language version that this implementation conforms to, as a string.
|
Python language version that this implementation conforms to, as a string.
|
||||||
|
40
docs/library/zlib.rst
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
:mod:`zlib` -- zlib decompression
|
||||||
|
=================================
|
||||||
|
|
||||||
|
.. include:: ../templates/unsupported_in_circuitpython.inc
|
||||||
|
|
||||||
|
.. module:: zlib
|
||||||
|
:synopsis: zlib decompression
|
||||||
|
|
||||||
|
|see_cpython_module| :mod:`cpython:zlib`.
|
||||||
|
|
||||||
|
This module allows decompression of binary data compressed with the
`DEFLATE algorithm <https://en.wikipedia.org/wiki/DEFLATE>`_
(commonly used in the zlib library and the gzip archiver). Compression
|
||||||
|
is not yet implemented.
|
||||||
|
|
||||||
|
Functions
|
||||||
|
---------
|
||||||
|
|
||||||
|
.. function:: decompress(data, wbits=0, bufsize=0, /)
|
||||||
|
|
||||||
|
Return decompressed *data* as bytes. *wbits* is the DEFLATE dictionary window
size used during compression (8-15; the dictionary size is a power of 2 of
that value). Additionally, if the value is positive, *data* is assumed to be a
zlib stream (with a zlib header). Otherwise, if it's negative, it's assumed
to be a raw DEFLATE stream. The *bufsize* parameter is for compatibility with
CPython and is ignored.
|
||||||
|
|
||||||
|
.. class:: DecompIO(stream, wbits=0, /)
|
||||||
|
|
||||||
|
Create a ``stream`` wrapper which allows transparent decompression of
|
||||||
|
compressed data in another *stream*. This allows processing of compressed
streams with data larger than the available heap size. In addition to the
values described in :func:`decompress`, *wbits* may take values
24..31 (16 + 8..15), meaning that the input stream has a gzip header.
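A sketch of reading a gzip-compressed file (the file name is illustrative)::

    import zlib

    f = open("data.gz", "rb")
    d = zlib.DecompIO(f, 16 + 15)   # gzip header, 15-bit window
    print(d.read())
    f.close()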
|
||||||
|
|
||||||
|
.. admonition:: Difference to CPython
|
||||||
|
:class: attention
|
||||||
|
|
||||||
|
This class is a MicroPython extension. It's included on a provisional
basis and may be changed considerably or removed in later versions.
|
60
docs/pdf.rst
@ -1,60 +0,0 @@
|
|||||||
:orphan:
|
|
||||||
|
|
||||||
Adafruit CircuitPython API Reference
|
|
||||||
====================================
|
|
||||||
|
|
||||||
Welcome to the API reference documentation for Adafruit CircuitPython.
|
|
||||||
This contains low-level API reference docs which may link out to separate
|
|
||||||
*"getting started"* guides. `Adafruit <https://adafruit.com>`_ has many
|
|
||||||
excellent tutorials available through the
|
|
||||||
`Adafruit Learning System <https://learn.adafruit.com/>`_.
|
|
||||||
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 3
|
|
||||||
|
|
||||||
../README.rst
|
|
||||||
libraries.rst
|
|
||||||
workflows
|
|
||||||
environment.rst
|
|
||||||
troubleshooting.rst
|
|
||||||
../CONTRIBUTING
|
|
||||||
../BUILDING
|
|
||||||
../WEBUSB_README
|
|
||||||
supported_ports.rst
|
|
||||||
|
|
||||||
Design and porting reference
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
design_guide
|
|
||||||
porting
|
|
||||||
common_hal
|
|
||||||
|
|
||||||
API Reference
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:glob:
|
|
||||||
:maxdepth: 3
|
|
||||||
|
|
||||||
library/index.rst
|
|
||||||
../shared-bindings/*/index
|
|
||||||
../shared-bindings/help
|
|
||||||
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
reference/glossary.rst
|
|
||||||
../CODE_OF_CONDUCT
|
|
||||||
../docs/LICENSE
|
|
||||||
|
|
||||||
Indices and tables
|
|
||||||
==================
|
|
||||||
|
|
||||||
* :ref:`genindex`
|
|
||||||
* :ref:`modindex`
|
|
||||||
* :ref:`search`
|
|
@ -71,7 +71,7 @@ as a natural "TODO" list. An example minimal build list is shown below:
|
|||||||
CIRCUITPY_SDCARDIO = 0
|
CIRCUITPY_SDCARDIO = 0
|
||||||
CIRCUITPY_FRAMEBUFFERIO = 0
|
CIRCUITPY_FRAMEBUFFERIO = 0
|
||||||
CIRCUITPY_FREQUENCYIO = 0
|
CIRCUITPY_FREQUENCYIO = 0
|
||||||
CIRCUITPY_I2CTARGET = 0
|
CIRCUITPY_I2CPERIPHERAL = 0
|
||||||
# Requires SPI, PulseIO (stub ok):
|
# Requires SPI, PulseIO (stub ok):
|
||||||
CIRCUITPY_DISPLAYIO = 0
|
CIRCUITPY_DISPLAYIO = 0
|
||||||
|
|
||||||
@ -79,6 +79,8 @@ as a natural "TODO" list. An example minimal build list is shown below:
|
|||||||
# any port once their prerequisites in common-hal are complete.
|
# any port once their prerequisites in common-hal are complete.
|
||||||
# Requires DigitalIO:
|
# Requires DigitalIO:
|
||||||
CIRCUITPY_BITBANGIO = 0
|
CIRCUITPY_BITBANGIO = 0
|
||||||
|
# Requires DigitalIO
|
||||||
|
CIRCUITPY_GAMEPADSHIFT = 0
|
||||||
# Requires neopixel_write or SPI (dotstar)
|
# Requires neopixel_write or SPI (dotstar)
|
||||||
CIRCUITPY_PIXELBUF = 0
|
CIRCUITPY_PIXELBUF = 0
|
||||||
# Requires OS
|
# Requires OS
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
index.rst README.html
|
|
||||||
shared-bindings//__init__.rst shared-bindings//
|
shared-bindings//__init__.rst shared-bindings//
|
||||||
shared-bindings/_bleio/Adapter.rst shared-bindings/_bleio/#_bleio.Adapter
|
shared-bindings/_bleio/Adapter.rst shared-bindings/_bleio/#_bleio.Adapter
|
||||||
shared-bindings/_bleio/Address.rst shared-bindings/_bleio/#_bleio.Address
|
shared-bindings/_bleio/Address.rst shared-bindings/_bleio/#_bleio.Address
|
||||||
@ -46,10 +45,12 @@ shared-bindings/audiomp3/__init__.rst shared-bindings/audiomp3/
|
|||||||
shared-bindings/audiopwmio/PWMAudioOut.rst shared-bindings/audiopwmio/#audiopwmio.PWMAudioOut
|
shared-bindings/audiopwmio/PWMAudioOut.rst shared-bindings/audiopwmio/#audiopwmio.PWMAudioOut
|
||||||
shared-bindings/audiopwmio/__init__.rst shared-bindings/audiopwmio/
|
shared-bindings/audiopwmio/__init__.rst shared-bindings/audiopwmio/
|
||||||
shared-bindings/bitbangio/I2C.rst shared-bindings/bitbangio/#bitbangio.I2C
|
shared-bindings/bitbangio/I2C.rst shared-bindings/bitbangio/#bitbangio.I2C
|
||||||
|
shared-bindings/bitbangio/OneWire.rst shared-bindings/bitbangio/#bitbangio.OneWire
|
||||||
shared-bindings/bitbangio/SPI.rst shared-bindings/bitbangio/#bitbangio.SPI
|
shared-bindings/bitbangio/SPI.rst shared-bindings/bitbangio/#bitbangio.SPI
|
||||||
shared-bindings/bitbangio/__init__.rst shared-bindings/bitbangio/
|
shared-bindings/bitbangio/__init__.rst shared-bindings/bitbangio/
|
||||||
shared-bindings/board/__init__.rst shared-bindings/board/
|
shared-bindings/board/__init__.rst shared-bindings/board/
|
||||||
shared-bindings/busio/I2C.rst shared-bindings/busio/#busio.I2C
|
shared-bindings/busio/I2C.rst shared-bindings/busio/#busio.I2C
|
||||||
|
shared-bindings/busio/OneWire.rst shared-bindings/busio/#busio.OneWire
|
||||||
shared-bindings/busio/Parity.rst shared-bindings/busio/#busio.Parity
|
shared-bindings/busio/Parity.rst shared-bindings/busio/#busio.Parity
|
||||||
shared-bindings/busio/SPI.rst shared-bindings/busio/#busio.SPI
|
shared-bindings/busio/SPI.rst shared-bindings/busio/#busio.SPI
|
||||||
shared-bindings/busio/UART.rst shared-bindings/busio/#busio.UART
|
shared-bindings/busio/UART.rst shared-bindings/busio/#busio.UART
|
||||||
@ -81,23 +82,25 @@ shared-bindings/framebufferio/FramebufferDisplay.rst shared-bindings/framebuffer
|
|||||||
shared-bindings/framebufferio/__init__.rst shared-bindings/framebufferio/
|
shared-bindings/framebufferio/__init__.rst shared-bindings/framebufferio/
|
||||||
shared-bindings/frequencyio/FrequencyIn.rst shared-bindings/frequencyio/#frequencyio.FrequencyIn
|
shared-bindings/frequencyio/FrequencyIn.rst shared-bindings/frequencyio/#frequencyio.FrequencyIn
|
||||||
shared-bindings/frequencyio/__init__.rst shared-bindings/frequencyio/
|
shared-bindings/frequencyio/__init__.rst shared-bindings/frequencyio/
|
||||||
|
shared-bindings/gamepad/GamePad.rst shared-bindings/gamepad/#gamepad.GamePad
|
||||||
|
shared-bindings/gamepad/__init__.rst shared-bindings/gamepad/
|
||||||
|
shared-bindings/gamepadshift/GamePadShift.rst shared-bindings/gamepadshift/#gamepadshift.GamePadShift
|
||||||
|
shared-bindings/gamepadshift/__init__.rst shared-bindings/gamepadshift/
|
||||||
shared-bindings/gnss/__init__.rst shared-bindings/gnss/
|
shared-bindings/gnss/__init__.rst shared-bindings/gnss/
|
||||||
shared-bindings/i2cperipheral/__init__.rst shared-bindings/i2cperipheral/
|
shared-bindings/i2cperipheral/__init__.rst shared-bindings/i2cperipheral/
|
||||||
shared-bindings/i2csecondary/__init__.rst shared-bindings/i2csecondary/
|
shared-bindings/i2csecondary/__init__.rst shared-bindings/i2csecondary/
|
||||||
shared-bindings/i2cslave/I2CSlave.rst shared-bindings/i2cperipheral/#i2ctarget.Target
|
shared-bindings/i2cslave/I2CSlave.rst shared-bindings/i2cperipheral/#i2cperipheral.I2CPeripheral
|
||||||
shared-bindings/i2cslave/I2CSlaveRequest.rst shared-bindings/i2cperipheral/#i2ctarget.I2CTargetRequest
|
shared-bindings/i2cslave/I2CSlaveRequest.rst shared-bindings/i2cperipheral/#i2cperipheral.I2CPeripheralRequest
|
||||||
shared-bindings/i2cperipheral/I2CPeripheral.rst shared-bindings/i2ctarget/#i2ctarget.I2CTarget
|
|
||||||
shared-bindings/i2cperipheral/I2CPeripheralRequest.rst shared-bindings/i2ctarget/#i2ctarget.I2CTargetRequest
|
|
||||||
shared-bindings/math/__init__.rst shared-bindings/math/
|
shared-bindings/math/__init__.rst shared-bindings/math/
|
||||||
shared-bindings/microcontroller/Pin.rst shared-bindings/microcontroller/#microcontroller.Pin
|
shared-bindings/microcontroller/Pin.rst shared-bindings/microcontroller/#microcontroller.Pin
|
||||||
shared-bindings/microcontroller/Processor.rst shared-bindings/microcontroller/#microcontroller.Processor
|
shared-bindings/microcontroller/Processor.rst shared-bindings/microcontroller/#microcontroller.Processor
|
||||||
shared-bindings/microcontroller/RunMode.rst shared-bindings/microcontroller/#microcontroller.RunMode
|
shared-bindings/microcontroller/RunMode.rst shared-bindings/microcontroller/#microcontroller.RunMode
|
||||||
shared-bindings/microcontroller/__init__.rst shared-bindings/microcontroller/
|
shared-bindings/microcontroller/__init__.rst shared-bindings/microcontroller/
|
||||||
|
shared-bindings/multiterminal/__init__.rst shared-bindings/multiterminal/
|
||||||
shared-bindings/neopixel_write/__init__.rst shared-bindings/neopixel_write/
|
shared-bindings/neopixel_write/__init__.rst shared-bindings/neopixel_write/
|
||||||
shared-bindings/network/__init__.rst shared-bindings/network/
|
shared-bindings/network/__init__.rst shared-bindings/network/
|
||||||
shared-bindings/nvm/ByteArray.rst shared-bindings/nvm/#nvm.ByteArray
|
shared-bindings/nvm/ByteArray.rst shared-bindings/nvm/#nvm.ByteArray
|
||||||
shared-bindings/nvm/__init__.rst shared-bindings/nvm/
|
shared-bindings/nvm/__init__.rst shared-bindings/nvm/
|
||||||
shared-bindings/onewireio/OneWire.rst shared-bindings/onewireio/#onewireio.OneWire
|
|
||||||
shared-bindings/os/__init__.rst shared-bindings/os/
|
shared-bindings/os/__init__.rst shared-bindings/os/
|
||||||
shared-bindings/protomatter/__init__.rst shared-bindings/protomatter/
|
shared-bindings/protomatter/__init__.rst shared-bindings/protomatter/
|
||||||
shared-bindings/ps2io/Ps2.rst shared-bindings/ps2io/#ps2io.Ps2
|
shared-bindings/ps2io/Ps2.rst shared-bindings/ps2io/#ps2io.Ps2
|
||||||
|
@ -32,7 +32,7 @@ Glossary
|
|||||||
|
|
||||||
callee-owned tuple
|
callee-owned tuple
|
||||||
This is a MicroPython-specific construct where, for efficiency
|
This is a MicroPython-specific construct where, for efficiency
|
||||||
reasons, some built-in functions or methods may reuse the same
|
reasons, some built-in functions or methods may re-use the same
|
||||||
underlying tuple object to return data. This avoids having to allocate
|
underlying tuple object to return data. This avoids having to allocate
|
||||||
a new tuple for every call, and reduces heap fragmentation. Programs
|
a new tuple for every call, and reduces heap fragmentation. Programs
|
||||||
should not hold references to callee-owned tuples and instead only
|
should not hold references to callee-owned tuples and instead only
|
||||||
@ -52,7 +52,7 @@ Glossary
|
|||||||
cross-compiler
|
cross-compiler
|
||||||
Also known as ``mpy-cross``. This tool runs on your PC and converts a
|
Also known as ``mpy-cross``. This tool runs on your PC and converts a
|
||||||
:term:`.py file` containing MicroPython code into a :term:`.mpy file`
|
:term:`.py file` containing MicroPython code into a :term:`.mpy file`
|
||||||
containing MicroPython :term:`bytecode`. This means it loads faster (the board
|
containing MicroPython bytecode. This means it loads faster (the board
|
||||||
doesn't have to compile the code), and uses less space on flash (the
|
doesn't have to compile the code), and uses less space on flash (the
|
||||||
bytecode is more space efficient).
|
bytecode is more space efficient).
|
||||||
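For illustration, a minimal sketch of driving the cross-compiler from a host-side script. It assumes an `mpy-cross` executable (built from the `mpy-cross/` directory) is already on `PATH`; the file name is a placeholder.

```python
# Hedged sketch: cross-compile one source file from Python.
# Assumes the mpy-cross binary is on PATH; "example.py" is a placeholder.
import subprocess

subprocess.run(["mpy-cross", "example.py"], check=True)  # writes example.mpy next to the source
```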
|
|
||||||
@ -112,24 +112,6 @@ Glossary
|
|||||||
require much less power. MicroPython is designed to be small and
|
require much less power. MicroPython is designed to be small and
|
||||||
optimized enough to run on an average modern microcontroller.
|
optimized enough to run on an average modern microcontroller.
|
||||||
|
|
||||||
micropython-lib
|
|
||||||
MicroPython is (usually) distributed as a single executable/binary
|
|
||||||
file with just few builtin modules. There is no extensive standard
|
|
||||||
library comparable with :term:`CPython`'s. Instead, there is a related,
|
|
||||||
but separate project `micropython-lib
|
|
||||||
<https://github.com/micropython/micropython-lib>`_ which provides
|
|
||||||
implementations for many modules from CPython's standard library.
|
|
||||||
|
|
||||||
Some of the modules are implemented in pure Python, and are able to
|
|
||||||
be used on all ports. However, the majority of these modules use
|
|
||||||
:term:`FFI` to access operating system functionality, and as such can
|
|
||||||
only be used on the :term:`MicroPython Unix port` (with limited support
|
|
||||||
for Windows).
|
|
||||||
|
|
||||||
Unlike the :term:`CPython` stdlib, micropython-lib modules are
|
|
||||||
intended to be installed individually - either using manual copying or
|
|
||||||
using :term:`mip`.
|
|
||||||
|
|
||||||
MicroPython port
|
MicroPython port
|
||||||
MicroPython supports different :term:`boards <board>`, RTOSes, and
|
MicroPython supports different :term:`boards <board>`, RTOSes, and
|
||||||
OSes, and can be relatively easily adapted to new systems. MicroPython
|
OSes, and can be relatively easily adapted to new systems. MicroPython
|
||||||
@ -151,26 +133,16 @@ Glossary
|
|||||||
machine-independent features. It can also function in a similar way to
|
machine-independent features. It can also function in a similar way to
|
||||||
:term:`CPython`'s ``python`` executable.
|
:term:`CPython`'s ``python`` executable.
|
||||||
|
|
||||||
mip
|
|
||||||
A package installer for MicroPython (mip - "mip installs packages"). It
|
|
||||||
installs MicroPython packages either from :term:`micropython-lib`,
|
|
||||||
GitHub, or arbitrary URLs. mip can be used on-device on
|
|
||||||
network-capable boards, and internally by tools such
|
|
||||||
as :term:`mpremote`.
|
|
||||||
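As an illustration only, a minimal on-device sketch of installing a micropython-lib package with mip on a network-capable MicroPython board; the package name is just an example.

```python
# Hedged sketch: install a package from micropython-lib on-device (MicroPython, not CircuitPython).
import mip

mip.install("umqtt.simple")  # downloads the package into the board's /lib directory
```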
|
|
||||||
mpremote
|
|
||||||
A tool for interacting with a MicroPython device.
|
|
||||||
|
|
||||||
.mpy file
|
.mpy file
|
||||||
The output of the :term:`cross-compiler`. A compiled form of a
|
The output of the :term:`cross-compiler`. A compiled form of a
|
||||||
:term:`.py file` that contains MicroPython :term:`bytecode` instead of
|
:term:`.py file` that contains MicroPython bytecode instead of Python
|
||||||
Python source code.
|
source code.
|
||||||
|
|
||||||
native
|
native
|
||||||
Usually refers to "native code", i.e. machine code for the target
|
Usually refers to "native code", i.e. machine code for the target
|
||||||
microcontroller (such as ARM Thumb, Xtensa, x86/x64). The ``@native``
|
microcontroller (such as ARM Thumb, Xtensa, x86/x64). The ``@native``
|
||||||
decorator can be applied to a MicroPython function to generate native
|
decorator can be applied to a MicroPython function to generate native
|
||||||
code instead of :term:`bytecode` for that function, which will likely be
|
code instead of bytecode for that function, which will likely be
|
||||||
faster but use more RAM.
|
faster but use more RAM.
|
||||||
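A small sketch of the decorator described above. It runs only under MicroPython (CPython has no `micropython` module), and the function body is just an example.

```python
# Hedged sketch: emit native machine code for one function (MicroPython only).
import micropython

@micropython.native
def count_set_bits(value):
    # Same semantics as the bytecode version, but compiled to machine code.
    count = 0
    while value:
        count += value & 1
        value >>= 1
    return count

print(count_set_bits(0b1011))  # 3
```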
|
|
||||||
port
|
port
|
||||||
@ -187,7 +159,7 @@ Glossary
|
|||||||
typically accessible on a host PC via USB.
|
typically accessible on a host PC via USB.
|
||||||
|
|
||||||
stream
|
stream
|
||||||
Also known as a "file-like object". A Python object which provides
|
Also known as a "file-like object". An Python object which provides
|
||||||
sequential read-write access to the underlying data. A stream object
|
sequential read-write access to the underlying data. A stream object
|
||||||
implements a corresponding interface, which consists of methods like
|
implements a corresponding interface, which consists of methods like
|
||||||
``read()``, ``write()``, ``readinto()``, ``seek()``, ``flush()``,
|
``read()``, ``write()``, ``readinto()``, ``seek()``, ``flush()``,
|
||||||
@ -201,12 +173,3 @@ Glossary
|
|||||||
peripheral that sends data over a pair of pins (TX & RX). Many boards
|
peripheral that sends data over a pair of pins (TX & RX). Many boards
|
||||||
include a way to make at least one of the UARTs available to a host PC
|
include a way to make at least one of the UARTs available to a host PC
|
||||||
as a serial port over USB.
|
as a serial port over USB.
|
||||||
|
|
||||||
upip
|
|
||||||
A now-obsolete package manager for MicroPython, inspired
|
|
||||||
by :term:`CPython`'s pip, but much smaller and with reduced
|
|
||||||
functionality. See its replacement, :term:`mip`.
|
|
||||||
|
|
||||||
webrepl
|
|
||||||
A way of connecting to the REPL (and transferring files) on a device
|
|
||||||
over the internet from a browser. See https://micropython.org/webrepl
|
|
||||||
|
@ -1,13 +1,6 @@
|
|||||||
# Derived from code on Eric Holscher's blog, found at:
|
# Derived from code on Eric Holscher's blog, found at:
|
||||||
# https://www.ericholscher.com/blog/2016/jul/25/integrating-jinja-rst-sphinx/
|
# https://www.ericholscher.com/blog/2016/jul/25/integrating-jinja-rst-sphinx/
|
||||||
|
|
||||||
import re
|
|
||||||
|
|
||||||
def render_with_jinja(docname, source):
|
|
||||||
if re.search('^\s*.. jinja$', source[0], re.M):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def rstjinja(app, docname, source):
|
def rstjinja(app, docname, source):
|
||||||
"""
|
"""
|
||||||
Render our pages as a jinja template for fancy templating goodness.
|
Render our pages as a jinja template for fancy templating goodness.
|
||||||
@ -16,12 +9,12 @@ def rstjinja(app, docname, source):
|
|||||||
if app.builder.format not in ("html", "latex"):
|
if app.builder.format not in ("html", "latex"):
|
||||||
return
|
return
|
||||||
|
|
||||||
# we only want specific files to run through this func
|
# we only want our one jinja template to run through this func
|
||||||
if not render_with_jinja(docname, source):
|
if "shared-bindings/support_matrix" not in docname:
|
||||||
return
|
return
|
||||||
|
|
||||||
src = rendered = source[0]
|
src = rendered = source[0]
|
||||||
print(f"rendering {docname} as jinja templates")
|
print(docname)
|
||||||
|
|
||||||
if app.builder.format == "html":
|
if app.builder.format == "html":
|
||||||
rendered = app.builder.templates.render_string(
|
rendered = app.builder.templates.render_string(
|
||||||
|
@ -27,24 +27,12 @@ import pathlib
|
|||||||
import re
|
import re
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import functools
|
|
||||||
|
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
|
||||||
SUPPORTED_PORTS = [
|
SUPPORTED_PORTS = ['atmel-samd', 'broadcom', 'cxd56', 'espressif', 'litex', 'mimxrt10xx', 'nrf', 'raspberrypi', 'stm']
|
||||||
"atmel-samd",
|
|
||||||
"broadcom",
|
|
||||||
"cxd56",
|
|
||||||
"espressif",
|
|
||||||
"litex",
|
|
||||||
"mimxrt10xx",
|
|
||||||
"nrf",
|
|
||||||
"raspberrypi",
|
|
||||||
"silabs",
|
|
||||||
"stm",
|
|
||||||
]
|
|
||||||
|
|
||||||
ALIASES_BY_BOARD = {
|
aliases_by_board = {
|
||||||
"circuitplayground_express": [
|
"circuitplayground_express": [
|
||||||
"circuitplayground_express_4h",
|
"circuitplayground_express_4h",
|
||||||
"circuitplayground_express_digikey_pycon2019",
|
"circuitplayground_express_digikey_pycon2019",
|
||||||
@ -52,133 +40,110 @@ ALIASES_BY_BOARD = {
|
|||||||
"pybadge": ["edgebadge"],
|
"pybadge": ["edgebadge"],
|
||||||
"pyportal": ["pyportal_pynt"],
|
"pyportal": ["pyportal_pynt"],
|
||||||
"gemma_m0": ["gemma_m0_pycon2018"],
|
"gemma_m0": ["gemma_m0_pycon2018"],
|
||||||
|
"pewpew10": ["pewpew13"],
|
||||||
}
|
}
|
||||||
|
|
||||||
ALIASES_BRAND_NAMES = {
|
aliases_brand_names = {
|
||||||
"circuitplayground_express_4h": "Adafruit Circuit Playground Express 4-H",
|
"circuitplayground_express_4h":
|
||||||
"circuitplayground_express_digikey_pycon2019": "Circuit Playground Express Digi-Key PyCon 2019",
|
"Adafruit Circuit Playground Express 4-H",
|
||||||
"edgebadge": "Adafruit EdgeBadge",
|
"circuitplayground_express_digikey_pycon2019":
|
||||||
"pyportal_pynt": "Adafruit PyPortal Pynt",
|
"Circuit Playground Express Digi-Key PyCon 2019",
|
||||||
"gemma_m0_pycon2018": "Adafruit Gemma M0 PyCon 2018",
|
"edgebadge":
|
||||||
|
"Adafruit EdgeBadge",
|
||||||
|
"pyportal_pynt":
|
||||||
|
"Adafruit PyPortal Pynt",
|
||||||
|
"gemma_m0_pycon2018":
|
||||||
|
"Adafruit Gemma M0 PyCon 2018",
|
||||||
|
"pewpew13":
|
||||||
|
"PewPew 13",
|
||||||
}
|
}
|
||||||
|
|
||||||
ADDITIONAL_MODULES = {
|
additional_modules = {
|
||||||
"_asyncio": "MICROPY_PY_ASYNCIO",
|
|
||||||
"adafruit_bus_device": "CIRCUITPY_BUSDEVICE",
|
|
||||||
"adafruit_pixelbuf": "CIRCUITPY_PIXELBUF",
|
|
||||||
"array": "CIRCUITPY_ARRAY",
|
|
||||||
# always available, so depend on something that's always 1.
|
|
||||||
"builtins": "CIRCUITPY",
|
|
||||||
"builtins.pow3": "CIRCUITPY_BUILTINS_POW3",
|
|
||||||
"busio.SPI": "CIRCUITPY_BUSIO_SPI",
|
|
||||||
"busio.UART": "CIRCUITPY_BUSIO_UART",
|
|
||||||
"collections": "CIRCUITPY_COLLECTIONS",
|
|
||||||
"fontio": "CIRCUITPY_DISPLAYIO",
|
"fontio": "CIRCUITPY_DISPLAYIO",
|
||||||
"io": "CIRCUITPY_IO",
|
|
||||||
"keypad.KeyMatrix": "CIRCUITPY_KEYPAD_KEYMATRIX",
|
|
||||||
"keypad.Keys": "CIRCUITPY_KEYPAD_KEYS",
|
|
||||||
"keypad.ShiftRegisterKeys": "CIRCUITPY_KEYPAD_SHIFTREGISTERKEYS",
|
|
||||||
"os.getenv": "CIRCUITPY_OS_GETENV",
|
|
||||||
"select": "MICROPY_PY_SELECT_SELECT",
|
|
||||||
"sys": "CIRCUITPY_SYS",
|
|
||||||
"terminalio": "CIRCUITPY_DISPLAYIO",
|
"terminalio": "CIRCUITPY_DISPLAYIO",
|
||||||
"usb": "CIRCUITPY_USB_HOST",
|
"adafruit_bus_device": "CIRCUITPY_BUSDEVICE",
|
||||||
|
"adafruit_pixelbuf": "CIRCUITPY_PIXELBUF"
|
||||||
}
|
}
|
||||||
|
|
||||||
MODULES_NOT_IN_BINDINGS = [ "binascii", "errno", "json", "re", "ulab" ]
|
|
||||||
|
|
||||||
FROZEN_EXCLUDES = ["examples", "docs", "tests", "utils", "conf.py", "setup.py"]
|
|
||||||
"""Files and dirs at the root of a frozen directory that should be ignored.
|
|
||||||
This is the same list as in the preprocess_frozen_modules script."""
|
|
||||||
|
|
||||||
repository_urls = {}
|
|
||||||
"""Cache of repository URLs for frozen modules."""
|
|
||||||
|
|
||||||
root_dir = pathlib.Path(__file__).resolve().parent.parent
|
|
||||||
|
|
||||||
|
|
||||||
def get_circuitpython_root_dir():
|
def get_circuitpython_root_dir():
|
||||||
"""The path to the root './circuitpython' directory."""
|
""" The path to the root './circuitpython' directory
|
||||||
|
"""
|
||||||
|
file_path = pathlib.Path(__file__).resolve()
|
||||||
|
root_dir = file_path.parent.parent
|
||||||
|
|
||||||
return root_dir
|
return root_dir
|
||||||
|
|
||||||
|
def get_shared_bindings():
|
||||||
def get_bindings():
|
""" Get a list of modules in shared-bindings based on folder names
|
||||||
"""Get a list of modules in shared-bindings and ports/*/bindings based on folder names."""
|
|
||||||
shared_bindings_modules = [
|
|
||||||
module.name
|
|
||||||
for module in (get_circuitpython_root_dir() / "shared-bindings").iterdir()
|
|
||||||
if module.is_dir()
|
|
||||||
]
|
|
||||||
bindings_modules = []
|
|
||||||
for d in get_circuitpython_root_dir().glob("ports/*/bindings"):
|
|
||||||
bindings_modules.extend(module.name for module in d.iterdir() if d.is_dir())
|
|
||||||
return shared_bindings_modules + bindings_modules + MODULES_NOT_IN_BINDINGS + list(ADDITIONAL_MODULES.keys())
|
|
||||||
|
|
||||||
|
|
||||||
def get_board_mapping():
|
|
||||||
"""
|
"""
|
||||||
Compiles the list of boards from the directories, with aliases and mapping
|
shared_bindings_dir = get_circuitpython_root_dir() / "shared-bindings"
|
||||||
to the port.
|
return [item.name for item in shared_bindings_dir.iterdir()] + ["binascii", "errno", "json", "re", "ulab"]
|
||||||
|
|
||||||
|
|
||||||
|
def read_mpconfig():
|
||||||
|
""" Open 'circuitpy_mpconfig.mk' and return the contents.
|
||||||
"""
|
"""
|
||||||
boards = {}
|
configs = []
|
||||||
for port in SUPPORTED_PORTS:
|
cpy_mpcfg = get_circuitpython_root_dir() / "py" / "circuitpy_mpconfig.mk"
|
||||||
board_path = root_dir / "ports" / port / "boards"
|
with open(cpy_mpcfg) as mpconfig:
|
||||||
for board_path in os.scandir(board_path):
|
configs = mpconfig.read()
|
||||||
if board_path.is_dir():
|
|
||||||
board_files = os.listdir(board_path.path)
|
return configs
|
||||||
board_id = board_path.name
|
|
||||||
aliases = ALIASES_BY_BOARD.get(board_path.name, [])
|
|
||||||
boards[board_id] = {
|
|
||||||
"port": port,
|
|
||||||
"download_count": 0,
|
|
||||||
"aliases": aliases,
|
|
||||||
}
|
|
||||||
for alias in aliases:
|
|
||||||
boards[alias] = {
|
|
||||||
"port": port,
|
|
||||||
"download_count": 0,
|
|
||||||
"alias": True,
|
|
||||||
"aliases": [],
|
|
||||||
}
|
|
||||||
return boards
|
|
||||||
|
|
||||||
|
|
||||||
def build_module_map():
|
def build_module_map():
|
||||||
"""Establish the base of the JSON file, based on the contents from
|
""" Establish the base of the JSON file, based on the contents from
|
||||||
`configs`. Base contains the module name and the controlling C macro name.
|
`configs`. Base will contain module names, if they're part of
|
||||||
|
the `FULL_BUILD`, or their default value (0, 1, or a list of
|
||||||
|
modules that determine default [see audiocore, audiomixer, etc.]).
|
||||||
|
|
||||||
"""
|
"""
|
||||||
base = dict()
|
base = dict()
|
||||||
modules = get_bindings()
|
modules = get_shared_bindings()
|
||||||
|
configs = read_mpconfig()
|
||||||
|
full_build = False
|
||||||
for module in modules:
|
for module in modules:
|
||||||
full_name = module
|
full_name = module
|
||||||
if module in ADDITIONAL_MODULES:
|
if module in additional_modules:
|
||||||
search_identifier = ADDITIONAL_MODULES[module]
|
search_identifier = additional_modules[module]
|
||||||
else:
|
else:
|
||||||
search_identifier = "CIRCUITPY_" + module.lstrip("_").upper()
|
search_identifier = 'CIRCUITPY_'+module.lstrip("_").upper()
|
||||||
|
re_pattern = f"{re.escape(search_identifier)}\s*\??=\s*(.+)"
|
||||||
|
find_config = re.findall(re_pattern, configs)
|
||||||
|
if not find_config:
|
||||||
|
continue
|
||||||
|
find_config = ", ".join([x.strip("$()") for x in find_config])
|
||||||
|
|
||||||
|
full_build = int("CIRCUITPY_FULL_BUILD" in find_config)
|
||||||
|
if not full_build:
|
||||||
|
default_val = find_config
|
||||||
|
else:
|
||||||
|
default_val = "None"
|
||||||
|
|
||||||
base[module] = {
|
base[module] = {
|
||||||
"name": full_name,
|
"name": full_name,
|
||||||
|
"full_build": str(full_build),
|
||||||
|
"default_value": default_val,
|
||||||
|
"excluded": {},
|
||||||
"key": search_identifier,
|
"key": search_identifier,
|
||||||
}
|
}
|
||||||
|
|
||||||
return base
|
return base
|
||||||
|
|
||||||
|
|
||||||
def get_settings_from_makefile(port_dir, board_name):
|
def get_settings_from_makefile(port_dir, board_name):
|
||||||
"""Invoke make to print the value of critical build settings
|
""" Invoke make in a mode which prints the database, then parse it for
|
||||||
|
settings.
|
||||||
|
|
||||||
This means that the effect of all Makefile directives is taken
|
This means that the effect of all Makefile directives is taken
|
||||||
into account, without having to re-encode the logic that sets them
|
into account, without having to re-encode the logic that sets them
|
||||||
in this script, something that has proved error-prone
|
in this script, something that has proved error-prone
|
||||||
|
|
||||||
This list must explicitly include any setting queried by tools/ci_set_matrix.py.
|
|
||||||
"""
|
"""
|
||||||
contents = subprocess.run(
|
contents = subprocess.run(
|
||||||
["make", "-C", port_dir, "-f", "Makefile", f"BOARD={board_name}", "print-CFLAGS", "print-CIRCUITPY_BUILD_EXTENSIONS", "print-FROZEN_MPY_DIRS", "print-SRC_PATTERNS", "print-SRC_SUPERVISOR"],
|
["make", "-C", port_dir, f"BOARD={board_name}", "-qp", "print-CC"],
|
||||||
encoding="utf-8",
|
encoding="utf-8",
|
||||||
errors="replace",
|
errors="replace",
|
||||||
stdout=subprocess.PIPE,
|
stdout=subprocess.PIPE,
|
||||||
stderr=subprocess.PIPE,
|
stderr=subprocess.PIPE
|
||||||
)
|
)
|
||||||
# Make signals errors with exit status 2; 0 and 1 are "non-error" statuses
|
# Make signals errors with exit status 2; 0 and 1 are "non-error" statuses
|
||||||
if contents.returncode not in (0, 1):
|
if contents.returncode not in (0, 1):
|
||||||
@ -189,110 +154,33 @@ def get_settings_from_makefile(port_dir, board_name):
|
|||||||
raise RuntimeError(error_msg)
|
raise RuntimeError(error_msg)
|
||||||
|
|
||||||
settings = {}
|
settings = {}
|
||||||
for line in contents.stdout.split("\n"):
|
for line in contents.stdout.split('\n'):
|
||||||
if line.startswith('CFLAGS ='):
|
# Handle both = and := definitions.
|
||||||
for m in re.findall(r'-D([A-Z][A-Z0-9_]*)=(\d+)', line):
|
m = re.match(r'^([A-Z][A-Z0-9_]*) :?= (.*)$', line)
|
||||||
settings[m[0]] = m[1]
|
if m:
|
||||||
elif m := re.match(r"^([A-Z][A-Z0-9_]*) = (.*)$", line):
|
|
||||||
settings[m.group(1)] = m.group(2)
|
settings[m.group(1)] = m.group(2)
|
||||||
|
|
||||||
return settings
|
return settings
|
||||||
|
|
||||||
|
def lookup_setting(settings, key, default=''):
|
||||||
def get_repository_url(directory):
|
|
||||||
if directory in repository_urls:
|
|
||||||
return repository_urls[directory]
|
|
||||||
readme = None
|
|
||||||
for readme_path in (
|
|
||||||
os.path.join(directory, "README.rst"),
|
|
||||||
os.path.join(os.path.dirname(directory), "README.rst"),
|
|
||||||
):
|
|
||||||
if os.path.exists(readme_path):
|
|
||||||
readme = readme_path
|
|
||||||
break
|
|
||||||
path = None
|
|
||||||
if readme:
|
|
||||||
with open(readme, "r") as fp:
|
|
||||||
for line in fp.readlines():
|
|
||||||
if m := re.match(
|
|
||||||
r"\s+:target:\s+(http\S+(docs.circuitpython|readthedocs)\S+)\s*",
|
|
||||||
line,
|
|
||||||
):
|
|
||||||
path = m.group(1)
|
|
||||||
break
|
|
||||||
if m := re.search(r"<(http[^>]+)>", line):
|
|
||||||
path = m.group(1)
|
|
||||||
break
|
|
||||||
if path is None:
|
|
||||||
contents = subprocess.run(
|
|
||||||
["git", "remote", "get-url", "origin"],
|
|
||||||
encoding="utf-8",
|
|
||||||
errors="replace",
|
|
||||||
stdout=subprocess.PIPE,
|
|
||||||
stderr=subprocess.PIPE,
|
|
||||||
cwd=directory,
|
|
||||||
)
|
|
||||||
path = contents.stdout.strip()
|
|
||||||
repository_urls[directory] = path
|
|
||||||
return path
|
|
||||||
|
|
||||||
def remove_prefix(s, prefix):
|
|
||||||
if not s.startswith(prefix):
|
|
||||||
raise ValueError(f"{s=} does not start with {prefix=}")
|
|
||||||
return s.removeprefix(prefix)
|
|
||||||
|
|
||||||
def frozen_modules_from_dirs(frozen_mpy_dirs, withurl):
|
|
||||||
"""
|
|
||||||
Go through the list of frozen directories and extract the python modules.
|
|
||||||
Paths are of the type:
|
|
||||||
$(TOP)/frozen/Adafruit_CircuitPython_CircuitPlayground
|
|
||||||
$(TOP)/frozen/circuitpython-stage/meowbit
|
|
||||||
Python modules are at the root of the path, and are python files or directories
|
|
||||||
containing python files. Except the ones in the FROZEN_EXCLUDES list.
|
|
||||||
"""
|
|
||||||
frozen_modules = []
|
|
||||||
for frozen_path in filter(lambda x: x, frozen_mpy_dirs.split(" ")):
|
|
||||||
frozen_path = remove_prefix(frozen_path, '../../')
|
|
||||||
source_dir = get_circuitpython_root_dir() / frozen_path
|
|
||||||
url_repository = get_repository_url(source_dir)
|
|
||||||
for sub in source_dir.glob("*"):
|
|
||||||
if sub.name in FROZEN_EXCLUDES:
|
|
||||||
continue
|
|
||||||
if sub.name.endswith(".py"):
|
|
||||||
if withurl:
|
|
||||||
frozen_modules.append((sub.name[:-3], url_repository))
|
|
||||||
else:
|
|
||||||
frozen_modules.append(sub.name[:-3])
|
|
||||||
continue
|
|
||||||
if next(sub.glob("**/*.py"), None): # tests if not empty
|
|
||||||
if withurl:
|
|
||||||
frozen_modules.append((sub.name, url_repository))
|
|
||||||
else:
|
|
||||||
frozen_modules.append(sub.name)
|
|
||||||
return frozen_modules
|
|
||||||
|
|
||||||
|
|
||||||
def lookup_setting(settings, key, default=""):
|
|
||||||
while True:
|
while True:
|
||||||
value = settings.get(key, default)
|
value = settings.get(key, default)
|
||||||
if not value.startswith("$"):
|
if not value.startswith('$'):
|
||||||
break
|
break
|
||||||
key = value[2:-1]
|
key = value[2:-1]
|
||||||
return value
|
return value
|
||||||
|
|
||||||
|
|
||||||
@functools.cache
|
|
||||||
def all_ports_all_boards(ports=SUPPORTED_PORTS):
|
def all_ports_all_boards(ports=SUPPORTED_PORTS):
|
||||||
for port in ports:
|
for port in ports:
|
||||||
|
|
||||||
port_dir = get_circuitpython_root_dir() / "ports" / port
|
port_dir = get_circuitpython_root_dir() / "ports" / port
|
||||||
for entry in (port_dir / "boards").iterdir():
|
for entry in (port_dir / "boards").iterdir():
|
||||||
if not entry.is_dir():
|
if not entry.is_dir():
|
||||||
continue
|
continue
|
||||||
yield (port, entry)
|
yield (port, entry)
|
||||||
|
|
||||||
|
def support_matrix_by_board(use_branded_name=True):
|
||||||
def support_matrix_by_board(use_branded_name=True, withurl=True):
|
""" Compiles a list of the available core modules available for each
|
||||||
"""Compiles a list of the available core modules available for each
|
|
||||||
board.
|
board.
|
||||||
"""
|
"""
|
||||||
base = build_module_map()
|
base = build_module_map()
|
||||||
@ -305,9 +193,8 @@ def support_matrix_by_board(use_branded_name=True, withurl=True):
|
|||||||
if use_branded_name:
|
if use_branded_name:
|
||||||
with open(entry / "mpconfigboard.h") as get_name:
|
with open(entry / "mpconfigboard.h") as get_name:
|
||||||
board_contents = get_name.read()
|
board_contents = get_name.read()
|
||||||
board_name_re = re.search(
|
board_name_re = re.search(r"(?<=MICROPY_HW_BOARD_NAME)\s+(.+)",
|
||||||
r"(?<=MICROPY_HW_BOARD_NAME)\s+(.+)", board_contents
|
board_contents)
|
||||||
)
|
|
||||||
if board_name_re:
|
if board_name_re:
|
||||||
board_name = board_name_re.group(1).strip('"')
|
board_name = board_name_re.group(1).strip('"')
|
||||||
else:
|
else:
|
||||||
@ -315,69 +202,31 @@ def support_matrix_by_board(use_branded_name=True, withurl=True):
|
|||||||
|
|
||||||
board_modules = []
|
board_modules = []
|
||||||
for module in base:
|
for module in base:
|
||||||
key = base[module]["key"]
|
key = base[module]['key']
|
||||||
if int(lookup_setting(settings, key, "0")):
|
if int(lookup_setting(settings, key, '0')):
|
||||||
board_modules.append(base[module]["name"])
|
board_modules.append(base[module]['name'])
|
||||||
board_modules.sort()
|
board_modules.sort()
|
||||||
|
|
||||||
if "CIRCUITPY_BUILD_EXTENSIONS" in settings:
|
|
||||||
board_extensions = [
|
|
||||||
extension.strip()
|
|
||||||
for extension in settings["CIRCUITPY_BUILD_EXTENSIONS"].split(",")
|
|
||||||
]
|
|
||||||
else:
|
|
||||||
raise OSError(f"Board extensions undefined: {board_name}.")
|
|
||||||
|
|
||||||
frozen_modules = []
|
|
||||||
if "FROZEN_MPY_DIRS" in settings:
|
|
||||||
frozen_modules = frozen_modules_from_dirs(
|
|
||||||
settings["FROZEN_MPY_DIRS"], withurl
|
|
||||||
)
|
|
||||||
if frozen_modules:
|
|
||||||
frozen_modules.sort()
|
|
||||||
|
|
||||||
# generate alias boards too
|
# generate alias boards too
|
||||||
board_matrix = [
|
board_matrix = [(board_name, board_modules)]
|
||||||
(
|
if entry.name in aliases_by_board:
|
||||||
board_name,
|
for alias in aliases_by_board[entry.name]:
|
||||||
{
|
|
||||||
"modules": board_modules,
|
|
||||||
"frozen_libraries": frozen_modules,
|
|
||||||
"extensions": board_extensions,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
]
|
|
||||||
if entry.name in ALIASES_BY_BOARD:
|
|
||||||
for alias in ALIASES_BY_BOARD[entry.name]:
|
|
||||||
if use_branded_name:
|
if use_branded_name:
|
||||||
if alias in ALIASES_BRAND_NAMES:
|
if alias in aliases_brand_names:
|
||||||
alias = ALIASES_BRAND_NAMES[alias]
|
alias = aliases_brand_names[alias]
|
||||||
else:
|
else:
|
||||||
alias = alias.replace("_", " ").title()
|
alias = alias.replace("_"," ").title()
|
||||||
board_matrix.append(
|
board_matrix.append( (alias, board_modules) )
|
||||||
(
|
|
||||||
alias,
|
|
||||||
{
|
|
||||||
"modules": board_modules,
|
|
||||||
"frozen_libraries": frozen_modules,
|
|
||||||
"extensions": board_extensions,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
return board_matrix # this is now a list of (board,modules)
|
return board_matrix # this is now a list of (board,modules)
|
||||||
|
|
||||||
executor = ThreadPoolExecutor(max_workers=os.cpu_count())
|
executor = ThreadPoolExecutor(max_workers=os.cpu_count())
|
||||||
mapped_exec = executor.map(support_matrix, all_ports_all_boards())
|
mapped_exec = executor.map(support_matrix, all_ports_all_boards())
|
||||||
# flatmap with comprehensions
|
# flatmap with comprehensions
|
||||||
boards = dict(
|
boards = dict(sorted([board for matrix in mapped_exec for board in matrix]))
|
||||||
sorted(
|
|
||||||
[board for matrix in mapped_exec for board in matrix], key=lambda x: x[0]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
# print(json.dumps(boards, indent=2))
|
||||||
return boards
|
return boards
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
if __name__ == "__main__":
|
|
||||||
print(json.dumps(support_matrix_by_board(), indent=2))
|
print(json.dumps(support_matrix_by_board(), indent=2))
|
||||||
|
5  docs/static/custom.css  (vendored)
@ -1,5 +0,0 @@
|
|||||||
/* Workaround to force Sphinx to render tables to 100% and wordwrap */
|
|
||||||
/* See https://stackoverflow.com/questions/69359978/grid-table-does-not-word-wrap for more details */
|
|
||||||
.wy-table-responsive table td, .wy-table-responsive table th {
|
|
||||||
white-space: inherit;
|
|
||||||
}
|
|
17  docs/static/filter.css  (vendored)
@ -7,21 +7,8 @@
|
|||||||
right: 10px;
|
right: 10px;
|
||||||
top: 4px;
|
top: 4px;
|
||||||
}
|
}
|
||||||
|
.support-matrix-table .this_module code,
|
||||||
.support-matrix-table .reference.external {
|
.support-matrix-table .this_module span {
|
||||||
box-sizing: border-box;
|
|
||||||
font-weight: 700;
|
|
||||||
color: #404040;
|
|
||||||
font-family: "SFMono-Regular", Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", Courier, monospace;
|
|
||||||
padding: 2px 5px;
|
|
||||||
background: white;
|
|
||||||
border: 1px solid #e1e4e5;
|
|
||||||
font-size: 75%;
|
|
||||||
}
|
|
||||||
|
|
||||||
.support-matrix-table .this_module,
|
|
||||||
.support-matrix-table .this_module.reference.external,
|
|
||||||
.support-matrix-table .this_module * {
|
|
||||||
background: black;
|
background: black;
|
||||||
color: white;
|
color: white;
|
||||||
}
|
}
|
||||||
|
4  docs/static/filter.js  (vendored)
@ -44,14 +44,14 @@ $(() => {
|
|||||||
var nvisible = 0;
|
var nvisible = 0;
|
||||||
$(".support-matrix-table tbody tr").each( (index,item) => {
|
$(".support-matrix-table tbody tr").each( (index,item) => {
|
||||||
var name = $(item).find("td:first-child p").html();
|
var name = $(item).find("td:first-child p").html();
|
||||||
var modules = $(item).find("code, a.reference.external");
|
var modules = $(item).find("a.reference.internal");
|
||||||
var matching_all = true;
|
var matching_all = true;
|
||||||
//
|
//
|
||||||
list_search.forEach((sstring) => {
|
list_search.forEach((sstring) => {
|
||||||
var matching = (sstring[0] == "-");
|
var matching = (sstring[0] == "-");
|
||||||
for(var modi = 0; modi < modules.length; ++modi) {
|
for(var modi = 0; modi < modules.length; ++modi) {
|
||||||
module = modules[modi];
|
module = modules[modi];
|
||||||
var mod_name = module.firstChild.textContent;
|
var mod_name = module.firstChild.firstChild.textContent;
|
||||||
if(sstring[0] == "-") {
|
if(sstring[0] == "-") {
|
||||||
if(mod_name.match(sstring.substr(1))) {
|
if(mod_name.match(sstring.substr(1))) {
|
||||||
matching = false;
|
matching = false;
|
||||||
|
@ -12,13 +12,10 @@ is limited.
|
|||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|
||||||
../ports/atmel-samd/README
|
../ports/atmel-samd/README
|
||||||
../ports/broadcom/README
|
|
||||||
../ports/cxd56/README
|
../ports/cxd56/README
|
||||||
../ports/espressif/README
|
../ports/espressif/README
|
||||||
../ports/litex/README
|
../ports/litex/README
|
||||||
../ports/mimxrt10xx/README
|
../ports/mimxrt10xx/README
|
||||||
../ports/nrf/README
|
../ports/nrf/README
|
||||||
../ports/raspberrypi/README
|
../ports/raspberrypi/README
|
||||||
../ports/silabs/README
|
|
||||||
../ports/stm/README
|
../ports/stm/README
|
||||||
../ports/unix/README
|
|
||||||
|
@ -1,469 +0,0 @@
|
|||||||
# Workflows
|
|
||||||
|
|
||||||
Workflows are the processes used to 1) manipulate files on the CircuitPython device and 2) interact
|
|
||||||
with the serial connection to CircuitPython. The serial connection is usually used to access the
|
|
||||||
REPL.
|
|
||||||
|
|
||||||
Starting with CircuitPython 3.x we moved to a USB-only workflow. Prior to that, we used the serial
|
|
||||||
connection alone to do the whole workflow. In CircuitPython 7.x, a BLE workflow was added with the
|
|
||||||
advantage of working with mobile devices. CircuitPython 8.x added a web workflow that works over the
|
|
||||||
local network (usually Wi-Fi) and a web browser. Other clients can also use the Web REST API. Boards
|
|
||||||
should clearly document which workflows are supported.
|
|
||||||
|
|
||||||
Code for workflows lives in `supervisor/shared`.
|
|
||||||
|
|
||||||
The workflow APIs are documented here.
|
|
||||||
|
|
||||||
## USB
|
|
||||||
|
|
||||||
These USB interfaces are enabled by default on boards with USB support. They are usable once the
|
|
||||||
device has been plugged into a host.
|
|
||||||
|
|
||||||
### CIRCUITPY drive
|
|
||||||
CircuitPython exposes a standard mass storage (MSC) interface to enable file manipulation over a
|
|
||||||
standard interface. This interface works underneath the file system at the block level so using it
|
|
||||||
excludes other types of workflows from manipulating the file system at the same time.
|
|
||||||
|
|
||||||
### CDC serial
|
|
||||||
CircuitPython exposes one CDC USB interface for CircuitPython serial. This is a standard serial
|
|
||||||
USB interface.
|
|
||||||
|
|
||||||
TODO: Document how it distinguishes itself from the user CDC.
|
|
||||||
|
|
||||||
Setting baudrate 1200 and disconnecting will reboot into a bootloader. (Used by Arduino to trigger
|
|
||||||
a reset into bootloader.)
|
|
||||||
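As a hedged illustration of the 1200 baud reset described above, here is a host-side sketch using the third-party `pyserial` package; the device path is a placeholder for your board's CircuitPython CDC port.

```python
# Hedged sketch: trigger the bootloader with the 1200 baud "touch" from a host PC.
# Requires pyserial; replace the device path with your board's CDC serial port.
import serial

with serial.Serial("/dev/ttyACM0", baudrate=1200):
    pass  # opening at 1200 baud and then closing/disconnecting reboots into the bootloader
```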
|
|
||||||
## BLE
|
|
||||||
|
|
||||||
The BLE workflow is enabled for nRF boards. By default, to prevent malicious access, it is disabled.
|
|
||||||
To connect to the BLE workflow, press the reset button while the status LED blinks blue quickly
|
|
||||||
after the safe mode blinks. The board will restart and broadcast the file transfer service UUID
|
|
||||||
(`0xfebb`) along with the board's [Creation IDs](https://github.com/creationid/creators). This
|
|
||||||
public broadcast is done at a lower transmit level so the devices must be closer. On connection, the
|
|
||||||
device will need to pair and bond. Once bonded, the device will broadcast whenever disconnected
|
|
||||||
using a rotating key rather than a static one. Non-bonded devices won't be able to resolve it. After
|
|
||||||
connection, the central device can discover two default services. One for file transfer and one for
|
|
||||||
CircuitPython specifically that includes serial characteristics.
|
|
||||||
|
|
||||||
To change the default BLE advertising name without (or before) running user code, the desired name
|
|
||||||
can be put in the `settings.toml` file. The key is `CIRCUITPY_BLE_NAME`. It's limited to approximately
|
|
||||||
30 characters depending on the port's settings and will be truncated if longer.
|
|
||||||
|
|
||||||
### File Transfer API
|
|
||||||
|
|
||||||
CircuitPython uses [an open File Transfer API](https://github.com/adafruit/Adafruit_CircuitPython_BLE_File_Transfer)
|
|
||||||
to enable file system access.
|
|
||||||
|
|
||||||
### CircuitPython Service
|
|
||||||
|
|
||||||
The base UUID for the CircuitPython service is `ADAFXXXX-4369-7263-7569-7450794686e`. The `XXXX` is
|
|
||||||
replaced by the four specific digits below. The service itself is `0001`.
|
|
||||||
|
|
||||||
#### TX - `0002` / RX - `0003`
|
|
||||||
|
|
||||||
These characteristics work just like the Nordic UART Service (NUS) but have different UUIDs to prevent
|
|
||||||
conflicts with user created NUS services.
|
|
||||||
|
|
||||||
#### Version - `0100`
|
|
||||||
Read-only characteristic that returns the UTF-8 encoded version string.
|
|
||||||
|
|
||||||
## Web
|
|
||||||
If the keys `CIRCUITPY_WIFI_SSID` and `CIRCUITPY_WIFI_PASSWORD` are set in `settings.toml`,
|
|
||||||
CircuitPython will automatically connect to the given Wi-Fi network on boot and upon reload.
|
|
||||||
|
|
||||||
If `CIRCUITPY_WEB_API_PASSWORD` is also set, the web workflow will also start.
|
|
||||||
The web workflow will only be enabled if the Wi-Fi connection succeeds upon boot.
|
|
||||||
|
|
||||||
The webserver is on port 80 unless overridden by `CIRCUITPY_WEB_API_PORT`. It also enables MDNS.
|
|
||||||
The name of the board as advertised to the network can be overridden by `CIRCUITPY_WEB_INSTANCE_NAME`.
|
|
||||||
|
|
||||||
Here is an example `/settings.toml`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# To auto-connect to Wi-Fi
|
|
||||||
CIRCUITPY_WIFI_SSID="scottswifi"
|
|
||||||
CIRCUITPY_WIFI_PASSWORD="secretpassword"
|
|
||||||
|
|
||||||
# To enable the webserver. Change this too!
|
|
||||||
# Leave the User field blank in the browser.
|
|
||||||
CIRCUITPY_WEB_API_PASSWORD="passw0rd"
|
|
||||||
|
|
||||||
CIRCUITPY_WEB_API_PORT=80
|
|
||||||
CIRCUITPY_WEB_INSTANCE_NAME=""
|
|
||||||
```
|
|
||||||
|
|
||||||
MDNS is used to resolve [`circuitpython.local`](http://circuitpython.local) to a device specific
|
|
||||||
hostname of the form `cpy-XXXXXX.local`. The `XXXXXX` is based on network MAC address. The device
|
|
||||||
also provides the MDNS service with service type `_circuitpython` and protocol `_tcp`.
|
|
||||||
|
|
||||||
Since port 80 (or the port assigned to `CIRCUITPY_WEB_API_PORT`) is used for web workflow, the `mdns`
|
|
||||||
[module](https://docs.circuitpython.org/en/latest/shared-bindings/mdns/index.html#mdns.Server.advertise_service)
|
|
||||||
can't advertise an additional service on that port.
|
|
||||||
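For illustration, a hedged host-side sketch that browses for the `_circuitpython._tcp` service described above using the third-party `zeroconf` package; the exact `ServiceInfo` accessors vary slightly between `zeroconf` releases.

```python
# Hedged sketch: discover CircuitPython web workflow devices via MDNS/zeroconf.
import time

from zeroconf import ServiceBrowser, ServiceListener, Zeroconf

class CPListener(ServiceListener):
    def add_service(self, zc, type_, name):
        info = zc.get_service_info(type_, name)
        if info:
            print(name, info.parsed_addresses(), info.port)

    def update_service(self, zc, type_, name):
        pass

    def remove_service(self, zc, type_, name):
        pass

zc = Zeroconf()
browser = ServiceBrowser(zc, "_circuitpython._tcp.local.", CPListener())
time.sleep(5)  # give devices a few seconds to respond
zc.close()
```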
|
|
||||||
### HTTP
|
|
||||||
The web server is HTTP 1.1 and may use chunked responses so that it doesn't need to precompute
|
|
||||||
content length.
|
|
||||||
|
|
||||||
The API generally consists of an HTTP method such as GET or PUT and a path. Requests and responses
|
|
||||||
also have headers. Responses will contain a status code and status text such as `404 Not Found`.
|
|
||||||
This API tries to use standard status codes to encode the status of the various operations. The
|
|
||||||
[Mozilla Developer Network HTTP docs](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP)
|
|
||||||
are a great reference.
|
|
||||||
|
|
||||||
#### Examples
|
|
||||||
The examples use `curl`, a common command line program for issuing HTTP requests. The examples below
|
|
||||||
use `circuitpython.local` as the easiest way to work. If you have multiple active devices, you'll
|
|
||||||
want to use the specific `cpy-XXXXXX.local` version.
|
|
||||||
|
|
||||||
The examples also use `passw0rd` as the password placeholder. Replace it with your password before
|
|
||||||
running the example.
|
|
||||||
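The same requests can be issued from any HTTP client. Below is a hedged Python sketch using the third-party `requests` package that mirrors the directory-listing example further down; the hostname and password are placeholders, and the device-specific `cpy-XXXXXX.local` name is used directly to sidestep the cross-host redirect that `--location-trusted` handles for curl.

```python
# Hedged sketch: list the drive root as JSON over the web workflow.
# Replace the hostname and password with your device's values.
import requests

BASE = "http://cpy-XXXXXX.local"  # device-specific hostname (placeholder)

resp = requests.get(
    f"{BASE}/fs/",
    auth=("", "passw0rd"),                   # user is blank; password from settings.toml
    headers={"Accept": "application/json"},  # ask for JSON instead of the HTML page
)
resp.raise_for_status()
for entry in resp.json():                    # JSON shape as documented for directory GETs below
    print(entry["name"], entry["file_size"])
```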
|
|
||||||
### `/`
|
|
||||||
The root welcome page links to the file system page and also displays other CircuitPython devices
|
|
||||||
found using MDNS service discovery. This allows web browsers to find other devices from one. (All
|
|
||||||
devices will respond to `circuitpython.local` so the device redirected to may vary.)
|
|
||||||
|
|
||||||
### CORS
|
|
||||||
The web server will allow requests from `cpy-XXXXXX.local`, `127.0.0.1`, the device's IP and
|
|
||||||
`code.circuitpython.org`. (`circuitpython.local` requests will be redirected to `cpy-XXXXXX.local`.)
|
|
||||||
|
|
||||||
### File REST API
|
|
||||||
All file system related APIs are protected by HTTP basic authentication. It is *NOT* secure but will
|
|
||||||
hopefully prevent some griefing in shared settings. The password is sent unencrypted so do not reuse
|
|
||||||
a password with something important. The user field is left blank.
|
|
||||||
|
|
||||||
The password is taken from `settings.toml` with the key `CIRCUITPY_WEB_API_PASSWORD`. If this is unset, the
|
|
||||||
server will respond with `403 Forbidden`. When a password is set, but not provided in a request, it
|
|
||||||
will respond `401 Unauthorized`.
|
|
||||||
|
|
||||||
#### `/fs/`
|
|
||||||
|
|
||||||
The `/fs/` page will respond with a directory browsing HTML once authenticated. This page is always
|
|
||||||
gzipped. If the `Accept: application/json` header is provided, then the JSON representation of the
|
|
||||||
root will be returned.
|
|
||||||
|
|
||||||
##### OPTIONS
|
|
||||||
When requested with the `OPTIONS` method, the server will respond with CORS related headers. Most
|
|
||||||
aren't needed for API use. They are there for the web browser.
|
|
||||||
|
|
||||||
* `Access-Control-Allow-Methods` - Varies with USB state. `GET, OPTIONS` when USB is active. `GET, OPTIONS, PUT, DELETE, MOVE` otherwise.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X OPTIONS -L --location-trusted http://circuitpython.local/fs/
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `/fs/<directory path>/`
|
|
||||||
Directory paths must end with a /. Otherwise, the path is assumed to be a file.
|
|
||||||
|
|
||||||
##### GET
|
|
||||||
Returns a JSON representation of the directory.
|
|
||||||
|
|
||||||
* `200 OK` - Directory exists and JSON returned
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - Missing directory
|
|
||||||
|
|
||||||
Returns information about each file in the directory:
|
|
||||||
|
|
||||||
* `name` - File name. No trailing `/` on directory names
|
|
||||||
* `directory` - `true` when a directory. `false` otherwise
|
|
||||||
* `modified_ns` - File modification time in nanoseconds since January 1st, 1970. May not use full resolution
|
|
||||||
* `file_size` - File size in bytes. `0` for directories
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -H "Accept: application/json" -L --location-trusted http://circuitpython.local/fs/lib/hello/
|
|
||||||
```
|
|
||||||
|
|
||||||
```json
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"name": "world.txt",
|
|
||||||
"directory": false,
|
|
||||||
"modified_ns": 946934328000000000,
|
|
||||||
"file_size": 12
|
|
||||||
}
|
|
||||||
]
|
|
||||||
```
|
|
||||||
|
|
||||||
##### PUT
|
|
||||||
Tries to make a directory at the given path. Request body is ignored. The custom `X-Timestamp`
|
|
||||||
header can provide a timestamp in milliseconds since January 1st, 1970 (to match JavaScript's file
|
|
||||||
time resolution) used for the directory's modification time. The RTC time will be used otherwise.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
|
|
||||||
* `204 No Content` - Directory exists
|
|
||||||
* `201 Created` - Directory created
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
* `404 Not Found` - Missing parent directory
|
|
||||||
* `500 Server Error` - Other, unhandled error
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X PUT -L --location-trusted http://circuitpython.local/fs/lib/hello/world/
|
|
||||||
```
|
|
||||||
|
|
||||||
##### Move
|
|
||||||
Moves the directory at the given path to ``X-Destination``. Also known as rename.
|
|
||||||
|
|
||||||
The custom `X-Destination` header stores the destination path of the directory.
|
|
||||||
|
|
||||||
* `201 Created` - Directory renamed
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - Source directory not found or destination path is missing
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
* `412 Precondition Failed` - The destination path is already in use
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X MOVE -H "X-Destination: /fs/lib/hello2/" -L --location-trusted http://circuitpython.local/fs/lib/hello/
|
|
||||||
```
|
|
||||||
|
|
||||||
##### DELETE
|
|
||||||
Deletes the directory and all of its contents.
|
|
||||||
|
|
||||||
* `204 No Content` - Directory and its contents deleted
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - No directory
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X DELETE -L --location-trusted http://circuitpython.local/fs/lib/hello2/world/
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
#### `/fs/<file path>`
|
|
||||||
|
|
||||||
##### PUT
|
|
||||||
Stores the provided content to the file path.
|
|
||||||
|
|
||||||
The custom `X-Timestamp` header can provide a timestamp in milliseconds since January 1st, 1970
|
|
||||||
(to match JavaScript's file time resolution) used for the file's modification time. The RTC
|
|
||||||
time will be used otherwise.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
|
|
||||||
* `201 Created` - File created and saved
|
|
||||||
* `204 No Content` - File existed and overwritten
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - Missing parent directory
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
* `413 Payload Too Large` - `Expect` header not sent and file is too large
|
|
||||||
* `417 Expectation Failed` - `Expect` header sent and file is too large
|
|
||||||
* `500 Server Error` - Other, unhandled error
|
|
||||||
|
|
||||||
If the client sends the `Expect` header, the server will reply with `100 Continue` when ok.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
echo "Hello world" >> test.txt
|
|
||||||
curl -v -u :passw0rd -T test.txt -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
##### GET
|
|
||||||
Returns the raw file contents. `Content-Type` will be set based on extension:
|
|
||||||
|
|
||||||
* `text/plain` - `.py`, `.txt`
|
|
||||||
* `text/javascript` - `.js`
|
|
||||||
* `text/html` - `.html`
|
|
||||||
* `application/json` - `.json`
|
|
||||||
* `application/octet-stream` - Everything else
|
|
||||||
|
|
||||||
Will return:
|
|
||||||
* `200 OK` - File exists and file returned
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - Missing file
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
##### Move
|
|
||||||
Moves the file at the given path to the ``X-Destination``. Also known as rename.
|
|
||||||
|
|
||||||
The custom `X-Destination` header stores the destination path of the file.
|
|
||||||
|
|
||||||
* `201 Created` - File renamed
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - Source file not found or destination path is missing
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
* `412 Precondition Failed` - The destination path is already in use
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X MOVE -H "X-Destination: /fs/lib/hello/world2.txt" -L --location-trusted http://circuitpython.local/fs/lib/hello/world.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
##### DELETE
|
|
||||||
Deletes the file.
|
|
||||||
|
|
||||||
|
|
||||||
* `204 No Content` - File existed and deleted
|
|
||||||
* `401 Unauthorized` - Incorrect password
|
|
||||||
* `403 Forbidden` - No `CIRCUITPY_WEB_API_PASSWORD` set
|
|
||||||
* `404 Not Found` - File not found
|
|
||||||
* `409 Conflict` - USB is active and preventing file system modification
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -v -u :passw0rd -X DELETE -L --location-trusted http://circuitpython.local/fs/lib/hello/world2.txt
|
|
||||||
```
|
|
||||||
|
|
||||||
### `/cp/`
|
|
||||||
|
|
||||||
`/cp/` serves basic info about the CircuitPython device and others discovered through MDNS. It is
|
|
||||||
not protected by basic auth in case the device is someone else's.
|
|
||||||
|
|
||||||
Only `GET` requests are supported and will return `405 Method Not Allowed` otherwise.
|
|
||||||
|
|
||||||
#### `/cp/devices.json`
|
|
||||||
|
|
||||||
Returns information about other devices found on the network using MDNS.
|
|
||||||
|
|
||||||
* `total`: Total MDNS response count. May be more than in `devices` if internal limits were hit.
|
|
||||||
* `devices`: List of discovered devices.
|
|
||||||
* `hostname`: MDNS hostname
|
|
||||||
* `instance_name`: MDNS instance name. Defaults to human readable board name.
|
|
||||||
* `port`: Port of CircuitPython Web API
|
|
||||||
* `ip`: IP address
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```sh
|
|
||||||
curl -v -L http://circuitpython.local/cp/devices.json
|
|
||||||
```
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"total": 1,
|
|
||||||
"devices": [
|
|
||||||
{
|
|
||||||
"hostname": "cpy-951032",
|
|
||||||
"instance_name": "Adafruit Feather ESP32-S2 TFT",
|
|
||||||
"port": 80,
|
|
||||||
"ip": "192.168.1.235"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `/cp/diskinfo.json`
|
|
||||||
|
|
||||||
Returns information about the attached disk(s). A list of objects, one per disk.
|
|
||||||
|
|
||||||
* `root`: Filesystem path to the root of the disk.
|
|
||||||
* `free`: Count of free bytes on the disk.
|
|
||||||
* `block_size`: Size of a block in bytes.
|
|
||||||
* `writable`: True when CircuitPython and the web workflow can write to the disk. USB may claim a disk instead.
|
|
||||||
* `total`: Total bytes that make up the disk.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```sh
|
|
||||||
curl -v -L http://circuitpython.local/cp/diskinfo.json
|
|
||||||
```
|
|
||||||
|
|
||||||
```json
|
|
||||||
[{
|
|
||||||
"root": "/",
|
|
||||||
"free": 2964992,
|
|
||||||
"block_size": 512,
|
|
||||||
"writable": true,
|
|
||||||
"total": 2967552
|
|
||||||
}]
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `/cp/serial/`
|
|
||||||
|
|
||||||
|
|
||||||
Serves a basic serial terminal program when a `GET` request is received without the
|
|
||||||
`Upgrade: websocket` header. Otherwise the socket is upgraded to a WebSocket. See WebSockets below for more detail.
|
|
||||||
|
|
||||||
This is an authenticated endpoint in both modes.
|
|
||||||
|
|
||||||
#### `/cp/version.json`
|
|
||||||
|
|
||||||
Returns information about the device.
|
|
||||||
|
|
||||||
* `web_api_version`: Between `1` and `3`. This versions the rest of the API and new versions may not be backwards compatible. See below for more info.
|
|
||||||
* `version`: CircuitPython build version.
|
|
||||||
* `build_date`: CircuitPython build date.
|
|
||||||
* `board_name`: Human readable name of the board.
|
|
||||||
* `mcu_name`: Human readable name of the microcontroller.
|
|
||||||
* `board_id`: Board id used in code and on circuitpython.org.
|
|
||||||
* `creator_id`: Creator ID for the board.
|
|
||||||
* `creation_id`: Creation ID for the board, set by the creator.
|
|
||||||
* `hostname`: MDNS hostname.
|
|
||||||
* `port`: Port of CircuitPython Web Service.
|
|
||||||
* `ip`: IP address of the device.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```sh
|
|
||||||
curl -v -L http://circuitpython.local/cp/version.json
|
|
||||||
```
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"web_api_version": 1,
|
|
||||||
"version": "8.0.0-alpha.1-20-ge1d4518a9-dirty",
|
|
||||||
"build_date": "2022-06-24",
|
|
||||||
"board_name": "ESP32-S3-USB-OTG-N8",
|
|
||||||
"mcu_name": "ESP32S3",
|
|
||||||
"board_id": "espressif_esp32s3_usb_otg_n8",
|
|
||||||
"creator_id": 12346,
|
|
||||||
"creation_id": 28683,
|
|
||||||
"hostname": "cpy-f57ce8",
|
|
||||||
"port": 80,
|
|
||||||
"ip": "192.168.1.94"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `/code/`
|
|
||||||
|
|
||||||
The `/code/` page returns a small static html page that will pull in and load the full code editor from
|
|
||||||
[code.circuitpython.org](https://code.circuitpython.org) for a full code editor experience. Because most
|
|
||||||
of the resources reside online instead of on the device, an active internet connection is required.
|
|
||||||
|
|
||||||
### Static files
|
|
||||||
|
|
||||||
* `/favicon.ico` - Blinka
|
|
||||||
* `/directory.js` - JavaScript for `/fs/`
|
|
||||||
* `/welcome.js` - JavaScript for `/`
|
|
||||||
|
|
||||||
### WebSocket
|
|
||||||
|
|
||||||
The CircuitPython serial interactions are available over a WebSocket. A WebSocket begins as a
|
|
||||||
special HTTP request that gets upgraded to a WebSocket. Authentication happens before upgrading.
|
|
||||||
|
|
||||||
WebSockets are *not* bare sockets once upgraded. Instead they have their own framing format for data.
|
|
||||||
CircuitPython can handle PING and CLOSE opcodes. All others are treated as TEXT. Data to
|
|
||||||
CircuitPython is expected to be masked UTF-8, as the spec requires. Data from CircuitPython to the
|
|
||||||
client is unmasked. It is also unbuffered so the client will get a variety of frame sizes.
|
|
||||||
|
|
||||||
Only one WebSocket at a time is supported.
|
|
||||||
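As an illustration only, a hedged host-side sketch of a serial WebSocket client using the third-party `websockets` package; the hostname and password are placeholders, and the header keyword differs between `websockets` releases (`extra_headers` in older versions, `additional_headers` in newer ones).

```python
# Hedged sketch: attach to the CircuitPython serial WebSocket from a host PC.
# Requires the third-party "websockets" package; hostname and password are placeholders.
import asyncio
import base64

import websockets

async def main():
    auth = base64.b64encode(b":passw0rd").decode()  # blank user plus the web API password
    async with websockets.connect(
        "ws://cpy-XXXXXX.local/cp/serial/",
        extra_headers={"Authorization": "Basic " + auth},  # additional_headers on newer releases
    ) as ws:
        await ws.send("\x03")  # Ctrl-C to interrupt any running code
        while True:
            print(await ws.recv(), end="")

asyncio.run(main())
```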
|
|
||||||
### Versions
|
|
||||||
|
|
||||||
* `1` - Initial version.
|
|
||||||
* `2` - Added `/cp/diskinfo.json`.
|
|
||||||
* `3` - Changed `/cp/diskinfo.json` to return a list in preparation for multi-disk support.
|
|
@ -1,74 +0,0 @@
|
|||||||
# Dynamic Native Modules
|
|
||||||
|
|
||||||
Dynamic Native Modules are .mpy files that contain native machine code from a
|
|
||||||
language other than Python. For more info see [the documentation](https://docs.micropython.org/en/latest/develop/natmod.html).
|
|
||||||
|
|
||||||
This should not be confused with [User C Modules](https://docs.micropython.org/en/latest/develop/cmodules.html) which are a
|
|
||||||
mechanism to add additional out-of-tree modules into the firmware build.
|
|
||||||
|
|
||||||
## Examples
|
|
||||||
|
|
||||||
This directory contains several examples of writing dynamic native modules, in
|
|
||||||
two main categories:
|
|
||||||
|
|
||||||
1. Feature examples.

   * `features0` - A module containing a single "factorial" function which
     demonstrates working with integers.

   * `features1` - A module that demonstrates some common tasks:
     - defining simple functions exposed to Python
     - defining local, helper C functions
     - defining constant integers and strings exposed to Python
     - getting and creating integer objects
     - creating Python lists
     - raising exceptions
     - allocating memory
     - BSS and constant data (rodata)
     - relocated pointers in rodata

   * `features2` - This is a hybrid module containing both Python and C code,
     and additionally the C code is spread over multiple files. It also
     demonstrates using floating point (only when the target supports
     hardware floating point).

   * `features3` - A module that shows how to use types and constant objects,
     and how to create dictionary instances.

   * `features4` - A module that demonstrates how to define a class.

2. Dynamic versions of existing built-ins.

   This provides a way to add missing functionality to firmware that doesn't
   include certain built-in modules. See the `heapq`, `random`, `re`,
   `deflate`, `btree`, and `framebuf` directories.

   So, for example, if your firmware was compiled with `MICROPY_PY_FRAMEBUF`
   disabled (e.g. to save flash space), then it would not include the
   `framebuf` module. The `framebuf` native module provides a way to add the
   `framebuf` module dynamically; a usage sketch follows this list.

   These work by defining a dynamic native module that `#include`s the
   original module's source and then performs the necessary initialisation of
   the module's globals dict.
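
As a hedged illustration of that workflow (the constructor arguments below are the standard
`framebuf` API; the exact `.mpy` file name produced depends on the example's Makefile):

```python
# Hypothetical sketch: after building framebuf.mpy for your ARCH and copying it
# to the board, it is used just like the built-in module it replaces.
import framebuf

buf = bytearray(16 * 8 // 8)                                # 16x8 monochrome buffer
fb = framebuf.FrameBuffer(buf, 16, 8, framebuf.MONO_HLSB)
fb.fill(0)
fb.pixel(3, 4, 1)
```
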
## Build instructions

To compile an example you need the same toolchain available as is required for
your target port, e.g. `arm-none-eabi-gcc` for any ARM Cortex-M target. See the
port instructions for details.

You also need the `pyelftools` Python package available, either via your system
package manager or installed from PyPI in a virtual environment with `pip`.

Each example provides a Makefile. You should specify the `ARCH` argument to
make (one of x86, x64, armv6m, armv7m, xtensa, xtensawin):

```
$ cd features0
$ make ARCH=armv7m
$ mpremote cp features0.mpy :
```
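
Once copied to the device, the module is imported like any other. A minimal check, assuming the
`features0` example built above (its `factorial` function is described earlier in this README):

```python
# Run on the device, for example via `mpremote repl` or `mpremote exec`.
import features0
print(features0.factorial(5))   # expected to print 120
```
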
examples/natmod/btree/Makefile
@ -0,0 +1,37 @@
# Location of top-level MicroPython directory
MPY_DIR = ../../..

# Name of module (different to built-in btree so it can coexist)
MOD = btree_$(ARCH)

# Source files (.c or .py)
SRC = btree_c.c btree_py.py

# Architecture to build for (x86, x64, armv7m, xtensa, xtensawin)
ARCH = x64

BTREE_DIR = $(MPY_DIR)/lib/berkeley-db-1.xx
BTREE_DEFS = -D__DBINTERFACE_PRIVATE=1 -Dmpool_error="(void)" -Dabort=abort_ "-Dvirt_fd_t=void*" $(BTREE_DEFS_EXTRA)
CFLAGS += -I$(BTREE_DIR)/PORT/include
CFLAGS += -Wno-old-style-definition -Wno-sign-compare -Wno-unused-parameter $(BTREE_DEFS)

SRC += $(addprefix $(realpath $(BTREE_DIR))/,\
    btree/bt_close.c \
    btree/bt_conv.c \
    btree/bt_delete.c \
    btree/bt_get.c \
    btree/bt_open.c \
    btree/bt_overflow.c \
    btree/bt_page.c \
    btree/bt_put.c \
    btree/bt_search.c \
    btree/bt_seq.c \
    btree/bt_split.c \
    btree/bt_utils.c \
    mpool/mpool.c \
    )

include $(MPY_DIR)/py/dynruntime.mk

# btree needs gnu99 defined
CFLAGS += -std=gnu99
examples/natmod/btree/btree_c.c
@ -0,0 +1,148 @@
#define MICROPY_PY_BTREE (1)

#include "py/dynruntime.h"

#include <unistd.h>

#if !defined(__linux__)
void *memcpy(void *dst, const void *src, size_t n) {
    return mp_fun_table.memmove_(dst, src, n);
}
void *memset(void *s, int c, size_t n) {
    return mp_fun_table.memset_(s, c, n);
}
#endif

void *memmove(void *dest, const void *src, size_t n) {
    return mp_fun_table.memmove_(dest, src, n);
}

void *malloc(size_t n) {
    void *ptr = m_malloc(n, false);
    return ptr;
}
void *realloc(void *ptr, size_t n) {
    mp_printf(&mp_plat_print, "UNDEF %d\n", __LINE__);
    return NULL;
}
void *calloc(size_t n, size_t m) {
    void *ptr = m_malloc(n * m, false);
    // memory already cleared by conservative GC
    return ptr;
}

void free(void *ptr) {
    m_free(ptr);
}

void abort_(void) {
    nlr_raise(mp_obj_new_exception(mp_load_global(MP_QSTR_RuntimeError)));
}

int native_errno;
#if defined(__linux__)
int *__errno_location (void)
#else
int *__errno (void)
#endif
{
    return &native_errno;
}

ssize_t mp_stream_posix_write(void *stream, const void *buf, size_t len) {
    mp_obj_base_t* o = stream;
    const mp_stream_p_t *stream_p = o->type->ext[0].protocol;
    mp_uint_t out_sz = stream_p->write(MP_OBJ_FROM_PTR(stream), buf, len, &native_errno);
    if (out_sz == MP_STREAM_ERROR) {
        return -1;
    } else {
        return out_sz;
    }
}

ssize_t mp_stream_posix_read(void *stream, void *buf, size_t len) {
    mp_obj_base_t* o = stream;
    const mp_stream_p_t *stream_p = o->type->ext[0].protocol;
    mp_uint_t out_sz = stream_p->read(MP_OBJ_FROM_PTR(stream), buf, len, &native_errno);
    if (out_sz == MP_STREAM_ERROR) {
        return -1;
    } else {
        return out_sz;
    }
}

off_t mp_stream_posix_lseek(void *stream, off_t offset, int whence) {
    const mp_obj_base_t* o = stream;
    const mp_stream_p_t *stream_p = o->type->ext[0].protocol;
    struct mp_stream_seek_t seek_s;
    seek_s.offset = offset;
    seek_s.whence = whence;
    mp_uint_t res = stream_p->ioctl(MP_OBJ_FROM_PTR(stream), MP_STREAM_SEEK, (mp_uint_t)(uintptr_t)&seek_s, &native_errno);
    if (res == MP_STREAM_ERROR) {
        return -1;
    }
    return seek_s.offset;
}

int mp_stream_posix_fsync(void *stream) {
    mp_obj_base_t* o = stream;
    const mp_stream_p_t *stream_p = o->type->ext[0].protocol;
    mp_uint_t res = stream_p->ioctl(MP_OBJ_FROM_PTR(stream), MP_STREAM_FLUSH, 0, &native_errno);
    if (res == MP_STREAM_ERROR) {
        return -1;
    }
    return res;
}

mp_obj_full_type_t btree_type;

#include "extmod/modbtree.c"

mp_map_elem_t btree_locals_dict_table[8];
STATIC MP_DEFINE_CONST_DICT(btree_locals_dict, btree_locals_dict_table);

STATIC mp_obj_t btree_open(size_t n_args, const mp_obj_t *args) {
    // Make sure we got a stream object
    mp_get_stream_raise(args[0], MP_STREAM_OP_READ | MP_STREAM_OP_WRITE | MP_STREAM_OP_IOCTL);

    BTREEINFO openinfo = {0};
    openinfo.flags = mp_obj_get_int(args[1]);
    openinfo.cachesize = mp_obj_get_int(args[2]);
    openinfo.psize = mp_obj_get_int(args[3]);
    openinfo.minkeypage = mp_obj_get_int(args[4]);
    DB *db = __bt_open(MP_OBJ_TO_PTR(args[0]), &btree_stream_fvtable, &openinfo, 0);
    if (db == NULL) {
        mp_raise_OSError(native_errno);
    }

    return MP_OBJ_FROM_PTR(btree_new(db, args[0]));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(btree_open_obj, 5, 5, btree_open);

mp_obj_t mpy_init(mp_obj_fun_bc_t *self, size_t n_args, size_t n_kw, mp_obj_t *args) {
    MP_DYNRUNTIME_INIT_ENTRY

    btree_type.base.type = (void*)&mp_fun_table.type_type;
    btree_type.flags = MP_TYPE_FLAG_EXTENDED;
    btree_type.name = MP_QSTR_btree;
    btree_type.print = btree_print;
    btree_type.ext[0].getiter = btree_getiter;
    btree_type.ext[0].iternext = btree_iternext;
    btree_type.ext[0].binary_op = btree_binary_op;
    btree_type.ext[0].subscr = btree_subscr;
    btree_locals_dict_table[0] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_close), MP_OBJ_FROM_PTR(&btree_close_obj) };
    btree_locals_dict_table[1] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_flush), MP_OBJ_FROM_PTR(&btree_flush_obj) };
    btree_locals_dict_table[2] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_get), MP_OBJ_FROM_PTR(&btree_get_obj) };
    btree_locals_dict_table[3] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_put), MP_OBJ_FROM_PTR(&btree_put_obj) };
    btree_locals_dict_table[4] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_seq), MP_OBJ_FROM_PTR(&btree_seq_obj) };
    btree_locals_dict_table[5] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_keys), MP_OBJ_FROM_PTR(&btree_keys_obj) };
    btree_locals_dict_table[6] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_values), MP_OBJ_FROM_PTR(&btree_values_obj) };
    btree_locals_dict_table[7] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_items), MP_OBJ_FROM_PTR(&btree_items_obj) };
    btree_type.locals_dict = (void*)&btree_locals_dict;

    mp_store_global(MP_QSTR__open, MP_OBJ_FROM_PTR(&btree_open_obj));
    mp_store_global(MP_QSTR_INCL, MP_OBJ_NEW_SMALL_INT(FLAG_END_KEY_INCL));
    mp_store_global(MP_QSTR_DESC, MP_OBJ_NEW_SMALL_INT(FLAG_DESC));

    MP_DYNRUNTIME_INIT_EXIT
}
examples/natmod/btree/btree_py.py
@ -0,0 +1,3 @@
# Implemented in Python to support keyword arguments
def open(stream, *, flags=0, cachesize=0, pagesize=0, minkeypage=0):
    return _open(stream, flags, cachesize, pagesize, minkeypage)
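
A hedged usage sketch follows (the module is named `btree_$(ARCH)` per the Makefile above, so the
import name assumes an armv7m build; the key/value API mirrors the built-in `btree` module):

```python
# Hypothetical sketch, run on the device after copying btree_armv7m.mpy to it.
import btree_armv7m as btree

f = open("mydb.db", "w+b")          # the backing stream must support read/write/ioctl
db = btree.open(f, pagesize=512)    # keyword arguments handled by the Python wrapper above
db[b"key"] = b"value"
print(db[b"key"])
db.close()
f.close()
```
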
@ -1,70 +0,0 @@
#define MICROPY_PY_DEFLATE (1)
#define MICROPY_PY_DEFLATE_COMPRESS (1)

#include "py/dynruntime.h"

#if !defined(__linux__)
void *memcpy(void *dst, const void *src, size_t n) {
    return mp_fun_table.memmove_(dst, src, n);
}
void *memset(void *s, int c, size_t n) {
    return mp_fun_table.memset_(s, c, n);
}
#endif

mp_obj_full_type_t deflateio_type;

#include "extmod/moddeflate.c"

// Re-implemented from py/stream.c, not yet available in dynruntime.h.
mp_obj_t mp_stream_close(mp_obj_t stream) {
    const mp_stream_p_t *stream_p = mp_get_stream(stream);
    int error;
    mp_uint_t res = stream_p->ioctl(stream, MP_STREAM_CLOSE, 0, &error);
    if (res == MP_STREAM_ERROR) {
        mp_raise_OSError(error);
    }
    return mp_const_none;
}
MP_DEFINE_CONST_FUN_OBJ_1(mp_stream_close_obj, mp_stream_close);

// Re-implemented from py/stream.c, not yet available in dynruntime.h.
STATIC mp_obj_t mp_stream___exit__(size_t n_args, const mp_obj_t *args) {
    (void)n_args;
    return mp_stream_close(args[0]);
}
MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(mp_stream___exit___obj, 4, 4, mp_stream___exit__);

// Re-implemented from obj.c, not yet available in dynruntime.h.
mp_obj_t mp_identity(mp_obj_t self) {
    return self;
}
MP_DEFINE_CONST_FUN_OBJ_1(mp_identity_obj, mp_identity);

mp_map_elem_t deflateio_locals_dict_table[7];
STATIC MP_DEFINE_CONST_DICT(deflateio_locals_dict, deflateio_locals_dict_table);

mp_obj_t mpy_init(mp_obj_fun_bc_t *self, size_t n_args, size_t n_kw, mp_obj_t *args) {
    MP_DYNRUNTIME_INIT_ENTRY

    deflateio_type.base.type = mp_fun_table.type_type;
    deflateio_type.name = MP_QSTR_DeflateIO;
    MP_OBJ_TYPE_SET_SLOT(&deflateio_type, make_new, &deflateio_make_new, 0);
    MP_OBJ_TYPE_SET_SLOT(&deflateio_type, protocol, &deflateio_stream_p, 1);
    deflateio_locals_dict_table[0] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_read), MP_OBJ_FROM_PTR(&mp_stream_read_obj) };
    deflateio_locals_dict_table[1] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_readinto), MP_OBJ_FROM_PTR(&mp_stream_readinto_obj) };
    deflateio_locals_dict_table[2] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_readline), MP_OBJ_FROM_PTR(&mp_stream_unbuffered_readline_obj) };
    deflateio_locals_dict_table[3] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_write), MP_OBJ_FROM_PTR(&mp_stream_write_obj) };
    deflateio_locals_dict_table[4] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR_close), MP_OBJ_FROM_PTR(&mp_stream_close_obj) };
    deflateio_locals_dict_table[5] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR___enter__), MP_OBJ_FROM_PTR(&mp_identity_obj) };
    deflateio_locals_dict_table[6] = (mp_map_elem_t){ MP_OBJ_NEW_QSTR(MP_QSTR___exit__), MP_OBJ_FROM_PTR(&mp_stream___exit___obj) };
    MP_OBJ_TYPE_SET_SLOT(&deflateio_type, locals_dict, (void*)&deflateio_locals_dict, 2);

    mp_store_global(MP_QSTR___name__, MP_OBJ_NEW_QSTR(MP_QSTR_deflate));
    mp_store_global(MP_QSTR_DeflateIO, MP_OBJ_FROM_PTR(&deflateio_type));
    mp_store_global(MP_QSTR_RAW, MP_OBJ_NEW_SMALL_INT(DEFLATEIO_FORMAT_RAW));
    mp_store_global(MP_QSTR_ZLIB, MP_OBJ_NEW_SMALL_INT(DEFLATEIO_FORMAT_ZLIB));
    mp_store_global(MP_QSTR_GZIP, MP_OBJ_NEW_SMALL_INT(DEFLATEIO_FORMAT_GZIP));

    MP_DYNRUNTIME_INIT_EXIT
}
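
A hedged usage sketch of the resulting module (stream in, stream out; the `DeflateIO` type and the
format constants are the ones registered in `mpy_init` above, and the installed file name is assumed
to be `deflate.mpy`):

```python
# Hypothetical sketch, run on a device where deflate.mpy has been installed.
import io
import deflate

buf = io.BytesIO()
with deflate.DeflateIO(buf, deflate.ZLIB) as d:   # compression is enabled above
    d.write(b"hello hello hello")

buf.seek(0)
with deflate.DeflateIO(buf, deflate.ZLIB) as d:   # decompress the same stream
    print(d.read())
```
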
@ -88,7 +88,7 @@ mp_obj_t mpy_init(mp_obj_fun_bc_t *self, size_t n_args, size_t n_kw, mp_obj_t *a
    // This must be first, it sets up the globals dict and other things
    MP_DYNRUNTIME_INIT_ENTRY

    // Messages can be printed as usual
    mp_printf(&mp_plat_print, "initialising module self=%p\n", self);

    // Make the functions available in the module's namespace
@ -1,7 +1,5 @@
# This Python code will be merged with the C code in main.c

# ruff: noqa: F821 - this file is evaluated with C-defined names in scope

import array
@ -1,14 +0,0 @@
# Location of top-level MicroPython directory
MPY_DIR = ../../..

# Name of module
MOD = features3

# Source files (.c or .py)
SRC = features3.c

# Architecture to build for (x86, x64, armv7m, xtensa, xtensawin)
ARCH = x64

# Include to get the rules for compiling and linking the module
include $(MPY_DIR)/py/dynruntime.mk
@ -1,60 +0,0 @@
/* This example demonstrates the following features in a native module:
    - using types
    - using constant objects
    - creating dictionaries
*/

// Include the header file to get access to the MicroPython API.
#include "py/dynruntime.h"

// A function that returns a tuple of object types.
STATIC mp_obj_t get_types(void) {
    return mp_obj_new_tuple(9, ((mp_obj_t []) {
        MP_OBJ_FROM_PTR(&mp_type_type),
        MP_OBJ_FROM_PTR(&mp_type_NoneType),
        MP_OBJ_FROM_PTR(&mp_type_bool),
        MP_OBJ_FROM_PTR(&mp_type_int),
        MP_OBJ_FROM_PTR(&mp_type_str),
        MP_OBJ_FROM_PTR(&mp_type_bytes),
        MP_OBJ_FROM_PTR(&mp_type_tuple),
        MP_OBJ_FROM_PTR(&mp_type_list),
        MP_OBJ_FROM_PTR(&mp_type_dict),
    }));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_0(get_types_obj, get_types);

// A function that returns a tuple of constant objects.
STATIC mp_obj_t get_const_objects(void) {
    return mp_obj_new_tuple(5, ((mp_obj_t []) {
        mp_const_none,
        mp_const_false,
        mp_const_true,
        mp_const_empty_bytes,
        mp_const_empty_tuple,
    }));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_0(get_const_objects_obj, get_const_objects);

// A function that creates a dictionary from the given arguments.
STATIC mp_obj_t make_dict(size_t n_args, const mp_obj_t *args) {
    mp_obj_t dict = mp_obj_new_dict(n_args / 2);
    for (; n_args >= 2; n_args -= 2, args += 2) {
        mp_obj_dict_store(dict, args[0], args[1]);
    }
    return dict;
}
STATIC MP_DEFINE_CONST_FUN_OBJ_VAR_BETWEEN(make_dict_obj, 0, MP_OBJ_FUN_ARGS_MAX, make_dict);

// This is the entry point and is called when the module is imported.
mp_obj_t mpy_init(mp_obj_fun_bc_t *self, size_t n_args, size_t n_kw, mp_obj_t *args) {
    // This must be first, it sets up the globals dict and other things.
    MP_DYNRUNTIME_INIT_ENTRY

    // Make the functions available in the module's namespace.
    mp_store_global(MP_QSTR_make_dict, MP_OBJ_FROM_PTR(&make_dict_obj));
    mp_store_global(MP_QSTR_get_types, MP_OBJ_FROM_PTR(&get_types_obj));
    mp_store_global(MP_QSTR_get_const_objects, MP_OBJ_FROM_PTR(&get_const_objects_obj));

    // This must be last, it restores the globals dict.
    MP_DYNRUNTIME_INIT_EXIT
}
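
A hedged sketch of calling the three functions registered above, once the example is built and the
resulting `features3.mpy` is copied to a device (the printed values are what the C code suggests,
not verified output):

```python
# Hypothetical sketch: exercise the features3 example module.
import features3

print(features3.get_types())                 # tuple of the nine type objects
print(features3.get_const_objects())         # (None, False, True, b'', ())
print(features3.make_dict("a", 1, "b", 2))   # {'a': 1, 'b': 2}
```
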
@ -1,14 +0,0 @@
# Location of top-level MicroPython directory
MPY_DIR = ../../..

# Name of module
MOD = features4

# Source files (.c or .py)
SRC = features4.c

# Architecture to build for (x86, x64, armv7m, xtensa, xtensawin)
ARCH = x64

# Include to get the rules for compiling and linking the module
include $(MPY_DIR)/py/dynruntime.mk