first save

commit b5a30adb27
Author: tiijay
Date: 2025-10-19 18:29:10 +02:00

1303 changed files with 234711 additions and 0 deletions

.venv/bin/Activate.ps1 (Normal file, 247 lines)

@@ -0,0 +1,247 @@
<#
.Synopsis
Activate a Python virtual environment for the current PowerShell session.
.Description
Pushes the python executable for a virtual environment to the front of the
$Env:PATH environment variable and sets the prompt to signify that you are
in a Python virtual environment. Makes use of the command line switches as
well as the `pyvenv.cfg` file values present in the virtual environment.
.Parameter VenvDir
Path to the directory that contains the virtual environment to activate. The
default value for this is the parent of the directory that the Activate.ps1
script is located within.
.Parameter Prompt
The prompt prefix to display when this virtual environment is activated. By
default, this prompt is the name of the virtual environment folder (VenvDir)
surrounded by parentheses and followed by a single space (ie. '(.venv) ').
.Example
Activate.ps1
Activates the Python virtual environment that contains the Activate.ps1 script.
.Example
Activate.ps1 -Verbose
Activates the Python virtual environment that contains the Activate.ps1 script,
and shows extra information about the activation as it executes.
.Example
Activate.ps1 -VenvDir C:\Users\MyUser\Common\.venv
Activates the Python virtual environment located in the specified location.
.Example
Activate.ps1 -Prompt "MyPython"
Activates the Python virtual environment that contains the Activate.ps1 script,
and prefixes the current prompt with the specified string (surrounded in
parentheses) while the virtual environment is active.
.Notes
On Windows, it may be required to enable this Activate.ps1 script by setting the
execution policy for the user. You can do this by issuing the following PowerShell
command:
PS C:\> Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
For more information on Execution Policies:
https://go.microsoft.com/fwlink/?LinkID=135170
#>
Param(
[Parameter(Mandatory = $false)]
[String]
$VenvDir,
[Parameter(Mandatory = $false)]
[String]
$Prompt
)
<# Function declarations --------------------------------------------------- #>
<#
.Synopsis
Remove all shell session elements added by the Activate script, including
removing the virtual environment's Python executable from the beginning of the
PATH variable.
.Parameter NonDestructive
If present, do not remove this function from the global namespace for the
session.
#>
function global:deactivate ([switch]$NonDestructive) {
# Revert to original values
# The prior prompt:
if (Test-Path -Path Function:_OLD_VIRTUAL_PROMPT) {
Copy-Item -Path Function:_OLD_VIRTUAL_PROMPT -Destination Function:prompt
Remove-Item -Path Function:_OLD_VIRTUAL_PROMPT
}
# The prior PYTHONHOME:
if (Test-Path -Path Env:_OLD_VIRTUAL_PYTHONHOME) {
Copy-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME -Destination Env:PYTHONHOME
Remove-Item -Path Env:_OLD_VIRTUAL_PYTHONHOME
}
# The prior PATH:
if (Test-Path -Path Env:_OLD_VIRTUAL_PATH) {
Copy-Item -Path Env:_OLD_VIRTUAL_PATH -Destination Env:PATH
Remove-Item -Path Env:_OLD_VIRTUAL_PATH
}
# Just remove the VIRTUAL_ENV altogether:
if (Test-Path -Path Env:VIRTUAL_ENV) {
Remove-Item -Path env:VIRTUAL_ENV
}
# Just remove VIRTUAL_ENV_PROMPT altogether.
if (Test-Path -Path Env:VIRTUAL_ENV_PROMPT) {
Remove-Item -Path env:VIRTUAL_ENV_PROMPT
}
# Just remove the _PYTHON_VENV_PROMPT_PREFIX altogether:
if (Get-Variable -Name "_PYTHON_VENV_PROMPT_PREFIX" -ErrorAction SilentlyContinue) {
Remove-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Scope Global -Force
}
# Leave deactivate function in the global namespace if requested:
if (-not $NonDestructive) {
Remove-Item -Path function:deactivate
}
}
<#
.Description
Get-PyVenvConfig parses the values from the pyvenv.cfg file located in the
given folder, and returns them in a map.
For each line in the pyvenv.cfg file, if that line can be parsed into exactly
two strings separated by `=` (with any amount of whitespace surrounding the =)
then it is considered a `key = value` line. The left hand string is the key,
the right hand is the value.
If the value starts with a `'` or a `"` then the first and last character is
stripped from the value before being captured.
.Parameter ConfigDir
Path to the directory that contains the `pyvenv.cfg` file.
#>
function Get-PyVenvConfig(
[String]
$ConfigDir
) {
Write-Verbose "Given ConfigDir=$ConfigDir, obtain values in pyvenv.cfg"
# Ensure the file exists, and issue a warning if it doesn't (but still allow the function to continue).
$pyvenvConfigPath = Join-Path -Resolve -Path $ConfigDir -ChildPath 'pyvenv.cfg' -ErrorAction Continue
# An empty map will be returned if no config file is found.
$pyvenvConfig = @{ }
if ($pyvenvConfigPath) {
Write-Verbose "File exists, parse `key = value` lines"
$pyvenvConfigContent = Get-Content -Path $pyvenvConfigPath
$pyvenvConfigContent | ForEach-Object {
$keyval = $PSItem -split "\s*=\s*", 2
if ($keyval[0] -and $keyval[1]) {
$val = $keyval[1]
# Remove extraneous quotations around a string value.
if ("'""".Contains($val.Substring(0, 1))) {
$val = $val.Substring(1, $val.Length - 2)
}
$pyvenvConfig[$keyval[0]] = $val
Write-Verbose "Adding Key: '$($keyval[0])'='$val'"
}
}
}
return $pyvenvConfig
}
<# Begin Activate script --------------------------------------------------- #>
# Determine the containing directory of this script
$VenvExecPath = Split-Path -Parent $MyInvocation.MyCommand.Definition
$VenvExecDir = Get-Item -Path $VenvExecPath
Write-Verbose "Activation script is located in path: '$VenvExecPath'"
Write-Verbose "VenvExecDir Fullname: '$($VenvExecDir.FullName)"
Write-Verbose "VenvExecDir Name: '$($VenvExecDir.Name)"
# Set values required in priority: CmdLine, ConfigFile, Default
# First, get the location of the virtual environment, it might not be
# VenvExecDir if specified on the command line.
if ($VenvDir) {
Write-Verbose "VenvDir given as parameter, using '$VenvDir' to determine values"
}
else {
Write-Verbose "VenvDir not given as a parameter, using parent directory name as VenvDir."
$VenvDir = $VenvExecDir.Parent.FullName.TrimEnd("\\/")
Write-Verbose "VenvDir=$VenvDir"
}
# Next, read the `pyvenv.cfg` file to determine any required value such
# as `prompt`.
$pyvenvCfg = Get-PyVenvConfig -ConfigDir $VenvDir
# Next, set the prompt from the command line, or the config file, or
# just use the name of the virtual environment folder.
if ($Prompt) {
Write-Verbose "Prompt specified as argument, using '$Prompt'"
}
else {
Write-Verbose "Prompt not specified as argument to script, checking pyvenv.cfg value"
if ($pyvenvCfg -and $pyvenvCfg['prompt']) {
Write-Verbose " Setting based on value in pyvenv.cfg='$($pyvenvCfg['prompt'])'"
$Prompt = $pyvenvCfg['prompt'];
}
else {
Write-Verbose " Setting prompt based on parent's directory's name. (Is the directory name passed to venv module when creating the virtual environment)"
Write-Verbose " Got leaf-name of $VenvDir='$(Split-Path -Path $venvDir -Leaf)'"
$Prompt = Split-Path -Path $venvDir -Leaf
}
}
Write-Verbose "Prompt = '$Prompt'"
Write-Verbose "VenvDir='$VenvDir'"
# Deactivate any currently active virtual environment, but leave the
# deactivate function in place.
deactivate -nondestructive
# Now set the environment variable VIRTUAL_ENV, used by many tools to determine
# that there is an activated venv.
$env:VIRTUAL_ENV = $VenvDir
if (-not $Env:VIRTUAL_ENV_DISABLE_PROMPT) {
Write-Verbose "Setting prompt to '$Prompt'"
# Set the prompt to include the env name
# Make sure _OLD_VIRTUAL_PROMPT is global
function global:_OLD_VIRTUAL_PROMPT { "" }
Copy-Item -Path function:prompt -Destination function:_OLD_VIRTUAL_PROMPT
New-Variable -Name _PYTHON_VENV_PROMPT_PREFIX -Description "Python virtual environment prompt prefix" -Scope Global -Option ReadOnly -Visibility Public -Value $Prompt
function global:prompt {
Write-Host -NoNewline -ForegroundColor Green "($_PYTHON_VENV_PROMPT_PREFIX) "
_OLD_VIRTUAL_PROMPT
}
$env:VIRTUAL_ENV_PROMPT = $Prompt
}
# Clear PYTHONHOME
if (Test-Path -Path Env:PYTHONHOME) {
Copy-Item -Path Env:PYTHONHOME -Destination Env:_OLD_VIRTUAL_PYTHONHOME
Remove-Item -Path Env:PYTHONHOME
}
# Add the venv to the PATH
Copy-Item -Path Env:PATH -Destination Env:_OLD_VIRTUAL_PATH
$Env:PATH = "$VenvExecDir$([System.IO.Path]::PathSeparator)$Env:PATH"

.venv/bin/activate (Normal file, 70 lines)

@@ -0,0 +1,70 @@
# This file must be used with "source bin/activate" *from bash*
# You cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
unset VIRTUAL_ENV_PROMPT
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
# on Windows, a path can contain colons and backslashes and has to be converted:
if [ "${OSTYPE:-}" = "cygwin" ] || [ "${OSTYPE:-}" = "msys" ] ; then
# transform D:\path\to\venv to /d/path/to/venv on MSYS
# and to /cygdrive/d/path/to/venv on Cygwin
export VIRTUAL_ENV=$(cygpath /home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv)
else
# use the path as-is
export VIRTUAL_ENV=/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv
fi
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/"bin":$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1='(.venv) '"${PS1:-}"
export PS1
VIRTUAL_ENV_PROMPT='(.venv) '
export VIRTUAL_ENV_PROMPT
fi
# Call hash to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
hash -r 2> /dev/null
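
The scripts in this directory all export VIRTUAL_ENV so that other tools can detect the active environment. A hedged sketch (not part of this commit) of how a Python process can check for it:

# Illustrative check for an active virtual environment.
import os
import sys

def in_virtualenv() -> bool:
    # Inside a venv the interpreter's prefix differs from the base installation's.
    return sys.prefix != getattr(sys, "base_prefix", sys.prefix)

def active_venv() -> str | None:
    # Activation exports VIRTUAL_ENV; fall back to sys.prefix when the venv's
    # python is run directly without sourcing an activate script.
    return os.environ.get("VIRTUAL_ENV") or (sys.prefix if in_virtualenv() else None)

print(active_venv())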

.venv/bin/activate.csh (Normal file, 27 lines)

@@ -0,0 +1,27 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; unsetenv VIRTUAL_ENV_PROMPT; test "\!:*" != "nondestructive" && unalias deactivate'
# Unset irrelevant variables.
deactivate nondestructive
setenv VIRTUAL_ENV /home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv
set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/"bin":$PATH"
set _OLD_VIRTUAL_PROMPT="$prompt"
if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
set prompt = '(.venv) '"$prompt"
setenv VIRTUAL_ENV_PROMPT '(.venv) '
endif
alias pydoc python -m pydoc
rehash

.venv/bin/activate.fish (Normal file, 69 lines)

@@ -0,0 +1,69 @@
# This file must be used with "source <venv>/bin/activate.fish" *from fish*
# (https://fishshell.com/). You cannot run it directly.
function deactivate -d "Exit virtual environment and return to normal shell environment"
# reset old environment variables
if test -n "$_OLD_VIRTUAL_PATH"
set -gx PATH $_OLD_VIRTUAL_PATH
set -e _OLD_VIRTUAL_PATH
end
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
set -e _OLD_VIRTUAL_PYTHONHOME
end
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
set -e _OLD_FISH_PROMPT_OVERRIDE
# prevents error when using nested fish instances (Issue #93858)
if functions -q _old_fish_prompt
functions -e fish_prompt
functions -c _old_fish_prompt fish_prompt
functions -e _old_fish_prompt
end
end
set -e VIRTUAL_ENV
set -e VIRTUAL_ENV_PROMPT
if test "$argv[1]" != "nondestructive"
# Self-destruct!
functions -e deactivate
end
end
# Unset irrelevant variables.
deactivate nondestructive
set -gx VIRTUAL_ENV /home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv
set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/"bin $PATH
# Unset PYTHONHOME if set.
if set -q PYTHONHOME
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
set -e PYTHONHOME
end
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
# fish uses a function instead of an env var to generate the prompt.
# Save the current fish_prompt function as the function _old_fish_prompt.
functions -c fish_prompt _old_fish_prompt
# With the original prompt function renamed, we can override with our own.
function fish_prompt
# Save the return status of the last command.
set -l old_status $status
# Output the venv prompt; color taken from the blue of the Python logo.
printf "%s%s%s" (set_color 4B8BBE) '(.venv) ' (set_color normal)
# Restore the return status of the previous command.
echo "exit $old_status" | .
# Output the original/"old" prompt.
_old_fish_prompt
end
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
set -gx VIRTUAL_ENV_PROMPT '(.venv) '
end

.venv/bin/mpremote (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from mpremote.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
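
This wrapper and the pip/pyserial wrappers that follow share the same console-entry-point pattern: strip an optional Windows-style "-script.pyw" or ".exe" suffix from argv[0], then call the package's main(). A standalone illustration of that regex, with made-up argv values:

# What the re.sub in these wrappers does to argv[0]:
import re

for name in ("mpremote", "mpremote-script.pyw", "mpremote.exe"):
    print(re.sub(r'(-script\.pyw|\.exe)?$', '', name))
# All three print "mpremote": the optional suffix is removed, bare names pass through.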

.venv/bin/pip (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

.venv/bin/pip3 (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

.venv/bin/pip3.12 (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

.venv/bin/pyserial-miniterm (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from serial.tools.miniterm import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

.venv/bin/pyserial-ports (Executable file, 8 lines)

@@ -0,0 +1,8 @@
#!/home/tiijay/Development/Microcontroller/rapsberry_pico/micropython/weather-info/.venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from serial.tools.list_ports import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())

.venv/bin/python (Symbolic link, 1 line)

@@ -0,0 +1 @@
python3

.venv/bin/python3 (Symbolic link, 1 line)

@@ -0,0 +1 @@
/usr/bin/python3

.venv/bin/python3.12 (Symbolic link, 1 line)

@@ -0,0 +1 @@
python3


@@ -0,0 +1,28 @@
"""Allows for type checking of Micropython specific builtins by pyright and pylance.
"""
from typing import Tuple, TypeVar
Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple) # constant
def const(expr: Const_T) -> Const_T:
"""
Used to declare that the expression is a constant so that the compiler can
optimise it. The use of this function should be as follows::
from micropython import const
CONST_X = const(123)
CONST_Y = const(2 * CONST_X + 1)
Constants declared this way are still accessible as global variables from
outside the module they are declared in. On the other hand, if a constant
begins with an underscore then it is hidden, it is not available as a global
variable, and does not take up any memory during execution.
This `const` function is recognised directly by the MicroPython parser and is
provided as part of the :mod:`micropython` module mainly so that scripts can be
written which run under both CPython and MicroPython, by following the above
pattern.
"""
...


@@ -0,0 +1,4 @@
from _typeshed import Incomplete
bdev: Incomplete
fs: Incomplete


@@ -0,0 +1,3 @@
from _typeshed import Incomplete
bdev: Incomplete


@@ -0,0 +1,31 @@
"""
IRQ object types, used in the machine, bluetooth, _rp2 and rp2 modules
_IRQ is a union of the types _IRQ_ESP32, _IRQ_RP2 and _IRQ_PYB
to allow the same stubs to support of the different ports of MicroPython.
"""
from typing import Type
from _typeshed import Incomplete
from typing_extensions import TypeAlias
class _IRQ_ESP32:
def trigger(self) -> int: ...
# def flags(self) -> int: ...
class _IRQ_RP2:
# rp2040
# object <irq> is of type irq
# flags -- <function>
# trigger -- <function>
def flags(self) -> int: ...
def trigger(self) -> int: ...
# pybv11
# TODO: Not sure what the correct implementation is
# NoneType
_IRQ_PYB: TypeAlias = None
_IRQ: TypeAlias = Type[_IRQ_ESP32] | Type[_IRQ_RP2] | Type[_IRQ_PYB] | Incomplete
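
These IRQ types mostly matter when type-checking interrupt setup code. A hedged usage sketch, assuming MicroPython on the rp2 port; the pin number and handler are made up:

# Hedged sketch: configuring a pin interrupt on MicroPython (rp2 port assumed).
from machine import Pin

button = Pin(14, Pin.IN, Pin.PULL_UP)  # GPIO14 with an internal pull-up

def on_falling(pin):
    # The handler receives the Pin instance that triggered the interrupt.
    print("falling edge on", pin)

irq = button.irq(handler=on_falling, trigger=Pin.IRQ_FALLING)
# On rp2 the returned object matches _IRQ_RP2 above: both flags() and trigger() exist.
print(irq.trigger())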


@@ -0,0 +1,112 @@
"""
MicroPython-stubs base types that are not present in typeshed.
This is a collection of types that are not present in typeshed, but are used in the micropython stubs.
Common cases are:
- MicroPython implementation is different from CPython, so the types are different.
- MicroPython has some types that are not present in CPython.
"""
from __future__ import annotations
import abc # type: ignore - not collections.abc
import sys
from typing import Final, final
from _typeshed import Incomplete, structseq, AnyStr_co
from typing_extensions import TypeAlias, TypeVar
from .subscriptable import Subscriptable as Subscriptable
from .IRQs import _IRQ
from .neopixelbase import _NeoPixelBase as _NeoPixelBase
from .blockdevice import (
_BlockDeviceProtocol as _BlockDeviceProtocol,
_OldAbstractBlockDev,
_OldAbstractReadOnlyBlockDev,
)
from .buffer_mp import AnyReadableBuf as AnyReadableBuf, AnyWritableBuf as AnyWritableBuf
from .io_mp import (
BytesIO as BytesIO,
FileIO as FileIO,
IncrementalNewlineDecoder as IncrementalNewlineDecoder,
StringIO as StringIO,
TextIOWrapper as TextIOWrapper,
IOBase_mp as IOBase_mp,
_BufferedIOBase,
_IOBase,
_RawIOBase,
_TextIOBase,
open as open,
)
from .time_mp import _TimeTuple as _TimeTuple
from .pathlike import PathLike as PathLike
from .mp_implementation import _mp_implementation as _mp_implementation
from .mp_available import mp_available as mp_available
# ------------------
# copied from _typeshed os.pyi as os.pyi cannot import from a module with the same name
GenericAlias = type(list[int])
# ------------------------------------------------------------------------------------
StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
_StrOrBytesT = TypeVar("_StrOrBytesT", str, bytes)
# ------------------------------------------------------------------------------------
_AnyPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
_FdOrAnyPath: TypeAlias = int | _AnyPath
# ------------------------------------------------------------------------------------
# HID_Tuple is used in multiple pyb.submodules
HID_Tuple: TypeAlias = tuple[int, int, int, int, bytes]
# ------------------------------------------------------------------------------------
# copied from _typeshed os.pyi as os.pyi cannot import from a module with the same name
@final
class uname_result(structseq[str], tuple[str, str, str, str, str]):
    if sys.version_info >= (3, 8):
        __match_args__: Final = ("sysname", "nodename", "release", "version", "machine")

    @property
    def sysname(self) -> str: ...
    @property
    def nodename(self) -> str: ...
    @property
    def release(self) -> str: ...
    @property
    def version(self) -> str: ...
    @property
    def machine(self) -> str: ...
# ------------------------------------------------------------------------------------
###########################
# HashLib
# manual addition to hashlib.pyi
class _Hash(abc.ABC):
    """
    Abstract base class for hashing algorithms that defines methods available in all algorithms.
    """

    def update(self, data: AnyReadableBuf, /) -> None:
        """
        Feed more binary data into hash.
        """

    def digest(self) -> bytes:
        """
        Return hash for all data passed through hash, as a bytes object. After this
        method is called, more data cannot be fed into the hash any longer.
        """

    def hexdigest(self) -> str:
        """
        This method is NOT implemented. Use ``binascii.hexlify(hash.digest())``
        to achieve a similar effect.
        """


@@ -0,0 +1,98 @@
import sys
from abc import abstractmethod
from types import MappingProxyType
from typing import AbstractSet as Set # noqa: Y022,Y038
from typing import AsyncGenerator as AsyncGenerator
from typing import AsyncIterable as AsyncIterable
from typing import AsyncIterator as AsyncIterator
from typing import Awaitable as Awaitable
from typing import Callable as Callable
from typing import Collection as Collection
from typing import Container as Container
from typing import Coroutine as Coroutine
from typing import Generator as Generator
from typing import Generic
from typing import Hashable as Hashable
from typing import ItemsView as ItemsView
from typing import Iterable as Iterable
from typing import Iterator as Iterator
from typing import KeysView as KeysView
from typing import Mapping as Mapping
from typing import MappingView as MappingView
from typing import MutableMapping as MutableMapping
from typing import MutableSequence as MutableSequence
from typing import MutableSet as MutableSet
from typing import Protocol
from typing import Reversible as Reversible
from typing import Sequence as Sequence
from typing import Sized as Sized
from typing import TypeVar
from typing import ValuesView as ValuesView
from typing import final, runtime_checkable
__all__ = [
"Awaitable",
"Coroutine",
"AsyncIterable",
"AsyncIterator",
"AsyncGenerator",
"Hashable",
"Iterable",
"Iterator",
"Generator",
"Reversible",
"Sized",
"Container",
"Callable",
"Collection",
"Set",
"MutableSet",
"Mapping",
"MutableMapping",
"MappingView",
"KeysView",
"ItemsView",
"ValuesView",
"Sequence",
"MutableSequence",
]
if sys.version_info < (3, 14):
    from typing import ByteString as ByteString  # noqa: Y057

    __all__ += ["ByteString"]

if sys.version_info >= (3, 12):
    __all__ += ["Buffer"]

_KT_co = TypeVar("_KT_co", covariant=True)  # Key type covariant containers.
_VT_co = TypeVar("_VT_co", covariant=True)  # Value type covariant containers.

@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 13):
        def isdisjoint(self, other: Iterable[_KT_co], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]):  # undocumented
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

@final
class dict_items(ItemsView[_KT_co, _VT_co]):  # undocumented
    def __eq__(self, value: object, /) -> bool: ...
    if sys.version_info >= (3, 13):
        def isdisjoint(self, other: Iterable[tuple[_KT_co, _VT_co]], /) -> bool: ...
    if sys.version_info >= (3, 10):
        @property
        def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...

if sys.version_info >= (3, 12):
    @runtime_checkable
    class Buffer(Protocol):
        @abstractmethod
        def __buffer__(self, flags: int, /) -> memoryview: ...


@@ -0,0 +1,201 @@
"""
The AbstractBlockDev class is a template for the design of block device classes,
MicroPython does not actually provide that class.
In the type stubs this is implemented as a Protocol, which is a Python 3.8 feature that allows for more flexible type checking.
It has been moved to the _mpy_shed type library for convenience, but may be relocated to the `vfs` stub module in the future.
- vfs.*(...)
- os.AbstractBlockDev(...)
- pyb.AbstractBlockDev(...)
- pyb.Flash(...)
- (u)os.AbstractBlockDev(...)
- esp32.Partition.ioctl
https://docs.micropython.org/en/v1.24.0/reference/filesystem.html?highlight=abstractblockdev
- https://docs.micropython.org/en/v1.24.0/library/vfs.html#vfs.AbstractBlockDev
- https://docs.micropython.org/en/v1.20.0/library/os.html?highlight=abstractblockdev#block-devices
- https://docs.micropython.org/en/v1.20.0/library/pyb.html?highlight=abstractblockdev#block-devices
- https://docs.micropython.org/en/latest/library/esp32.html#esp32.Partition.ioctl
"""
from typing import Any, Final, Literal, Protocol, Tuple, Type, final, overload, runtime_checkable
from _typeshed import AnyStr_co, Incomplete, structseq
from typing_extensions import TypeAlias, TypeVar
# TODO: improve the typechecking implementation if possible
_OldAbstractReadOnlyBlockDev: TypeAlias = Any
_OldAbstractBlockDev: TypeAlias = Any
# documented as AbstractBlockDev in the vfs module documentation
@runtime_checkable
class _BlockDeviceProtocol(Protocol):
"""
Block devices
-------------
A block device is an object which implements the block protocol. This enables a
device to support MicroPython filesystems. The physical hardware is represented
by a user defined class. The :class:`AbstractBlockDev` class is a template for
the design of such a class: MicroPython does not actually provide that class,
but an actual block device class must implement the methods described below.
A concrete implementation of this class will usually allow access to the
memory-like functionality of a piece of hardware (like flash memory). A block
device can be formatted to any supported filesystem and mounted using ``os``
methods.
See :ref:`filesystem` for example implementations of block devices using the
two variants of the block protocol described below.
.. _block-device-interface:
Simple and extended interface
.............................
There are two compatible signatures for the ``readblocks`` and ``writeblocks``
methods (see below), in order to support a variety of use cases. A given block
device may implement one form or the other, or both at the same time. The second
form (with the offset parameter) is referred to as the "extended interface".
Some filesystems (such as littlefs) that require more control over write
operations, for example writing to sub-block regions without erasing, may require
that the block device supports the extended interface.
"""
def __init__(self) -> None:
"""
Construct a block device object. The parameters to the constructor are
dependent on the specific block device.
"""
@overload
def readblocks(self, block_num: int, buf: bytearray, /) -> bool:
"""
The first form reads aligned, multiples of blocks.
Starting at the block given by the index *block_num*, read blocks from
the device into *buf* (an array of bytes).
The number of blocks to read is given by the length of *buf*,
which will be a multiple of the block size.
"""
@overload
def readblocks(self, block_num: int, buf: bytearray, offset: int, /) -> bool:
"""
The second form allows reading at arbitrary locations within a block,
and arbitrary lengths.
Starting at block index *block_num*, and byte offset within that block
of *offset*, read bytes from the device into *buf* (an array of bytes).
The number of bytes to read is given by the length of *buf*.
"""
@overload
def writeblocks(self, block_num: int, buf: bytes | bytearray, /) -> None:
"""
The first form writes aligned, multiples of blocks, and requires that the
blocks that are written to be first erased (if necessary) by this method.
Starting at the block given by the index *block_num*, write blocks from
*buf* (an array of bytes) to the device.
The number of blocks to write is given by the length of *buf*,
which will be a multiple of the block size.
The second form allows writing at arbitrary locations within a block,
and arbitrary lengths. Only the bytes being written should be changed,
and the caller of this method must ensure that the relevant blocks are
erased via a prior ``ioctl`` call.
Starting at block index *block_num*, and byte offset within that block
of *offset*, write bytes from *buf* (an array of bytes) to the device.
The number of bytes to write is given by the length of *buf*.
Note that implementations must never implicitly erase blocks if the offset
argument is specified, even if it is zero.
"""
@overload
def writeblocks(self, block_num: int, buf: bytes | bytearray, offset: int, /) -> None:
"""
The first form writes aligned, multiples of blocks, and requires that the
blocks that are written to be first erased (if necessary) by this method.
Starting at the block given by the index *block_num*, write blocks from
*buf* (an array of bytes) to the device.
The number of blocks to write is given by the length of *buf*,
which will be a multiple of the block size.
The second form allows writing at arbitrary locations within a block,
and arbitrary lengths. Only the bytes being written should be changed,
and the caller of this method must ensure that the relevant blocks are
erased via a prior ``ioctl`` call.
Starting at block index *block_num*, and byte offset within that block
of *offset*, write bytes from *buf* (an array of bytes) to the device.
The number of bytes to write is given by the length of *buf*.
Note that implementations must never implicitly erase blocks if the offset
argument is specified, even if it is zero.
"""
@overload
def ioctl(self, op: Literal[4, 5], arg: int) -> int:
"""
Control the block device and query its parameters. The operation to
perform is given by *op* which is one of the following integers:
- 1 -- initialise the device (*arg* is unused)
- 2 -- shutdown the device (*arg* is unused)
- 3 -- sync the device (*arg* is unused)
- 4 -- get a count of the number of blocks, should return an integer
(*arg* is unused)
- 5 -- get the number of bytes in a block, should return an integer,
or ``None`` in which case the default value of 512 is used
(*arg* is unused)
- 6 -- erase a block, *arg* is the block number to erase
As a minimum ``ioctl(4, ...)`` must be intercepted; for littlefs
``ioctl(6, ...)`` must also be intercepted. The need for others is
hardware dependent.
Prior to any call to ``writeblocks(block, ...)`` littlefs issues
``ioctl(6, block)``. This enables a device driver to erase the block
prior to a write if the hardware requires it. Alternatively a driver
might intercept ``ioctl(6, block)`` and return 0 (success). In this case
the driver assumes responsibility for detecting the need for erasure.
Unless otherwise stated ``ioctl(op, arg)`` can return ``None``.
Consequently an implementation can ignore unused values of ``op``. Where
``op`` is intercepted, the return value for operations 4 and 5 are as
detailed above. Other operations should return 0 on success and non-zero
for failure, with the value returned being an ``OSError`` errno code.
"""
@overload
def ioctl(self, op: Literal[1, 2, 3, 6], arg: int) -> int | None:
"""
Control the block device and query its parameters. The operation to
perform is given by *op* which is one of the following integers:
- 1 -- initialise the device (*arg* is unused)
- 2 -- shutdown the device (*arg* is unused)
- 3 -- sync the device (*arg* is unused)
- 4 -- get a count of the number of blocks, should return an integer
(*arg* is unused)
- 5 -- get the number of bytes in a block, should return an integer,
or ``None`` in which case the default value of 512 is used
(*arg* is unused)
- 6 -- erase a block, *arg* is the block number to erase
As a minimum ``ioctl(4, ...)`` must be intercepted; for littlefs
``ioctl(6, ...)`` must also be intercepted. The need for others is
hardware dependent.
Prior to any call to ``writeblocks(block, ...)`` littlefs issues
``ioctl(6, block)``. This enables a device driver to erase the block
prior to a write if the hardware requires it. Alternatively a driver
might intercept ``ioctl(6, block)`` and return 0 (success). In this case
the driver assumes responsibility for detecting the need for erasure.
Unless otherwise stated ``ioctl(op, arg)`` can return ``None``.
Consequently an implementation can ignore unused values of ``op``. Where
``op`` is intercepted, the return value for operations 4 and 5 are as
detailed above. Other operations should return 0 on success and non-zero
for failure, with the value returned being an ``OSError`` errno code.
"""


@@ -0,0 +1,8 @@
from _typeshed import Incomplete, structseq, AnyStr_co
from typing_extensions import TypeAlias, TypeVar
from array import array
# ------------------------------------------------------------------------------------
# TODO: need some way to allow a string to be passed in, e.g. uart_1.write("hello")
AnyReadableBuf: TypeAlias = bytearray | array | memoryview | bytes | Incomplete
AnyWritableBuf: TypeAlias = bytearray | array | memoryview | Incomplete
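
A short, hypothetical illustration of how the two aliases are meant to be used in the stubs, in the spirit of the uart_1.write() TODO above (the UART-style names are assumptions):

# Hypothetical stub fragment using the aliases above.
def write(buf: AnyReadableBuf, /) -> int | None:
    # Only reads from buf, so bytes, bytearray, memoryview and array all qualify.
    ...

def readinto(buf: AnyWritableBuf, /) -> int | None:
    # Fills buf in place, so only mutable buffers qualify.
    ...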


@@ -0,0 +1,553 @@
import sys
from typing import Any, Generic, NoReturn, SupportsIndex, TypeVar, final, overload
from _collections_abc import dict_items, dict_keys, dict_values
from _typeshed import (
SupportsItems,
SupportsKeysAndGetItem,
SupportsRichComparison,
SupportsRichComparisonT,
)
from typing_extensions import Self
if sys.version_info >= (3, 9):
# from types import GenericAlias
from _mpy_shed import GenericAlias
if sys.version_info >= (3, 10):
from collections.abc import (
Callable,
ItemsView,
Iterable,
Iterator,
KeysView,
Mapping,
MutableMapping,
MutableSequence,
Sequence,
ValuesView,
)
else:
from _collections_abc import *
__all__ = [
"ChainMap",
"Counter",
"OrderedDict",
"UserDict",
"UserList",
"UserString",
"defaultdict",
"deque",
"namedtuple",
]
_S = TypeVar("_S")
_T = TypeVar("_T")
_T1 = TypeVar("_T1")
_T2 = TypeVar("_T2")
_KT = TypeVar("_KT")
_VT = TypeVar("_VT")
_KT_co = TypeVar("_KT_co", covariant=True)
_VT_co = TypeVar("_VT_co", covariant=True)
# namedtuple is special-cased in the type checker; the initializer is ignored.
def namedtuple(
typename: str,
field_names: str | Iterable[str],
*,
rename: bool = False,
module: str | None = None,
defaults: Iterable[Any] | None = None,
) -> type[tuple[Any, ...]]: ...
class UserDict(MutableMapping[_KT, _VT]):
data: dict[_KT, _VT]
# __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics
@overload
def __init__(self, dict: None = None, /) -> None: ...
@overload
def __init__(
self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
dict: None = None,
/,
**kwargs: _VT, # pyright: ignore[reportInvalidTypeVarUse] #11780
) -> None: ...
@overload
def __init__(self, dict: SupportsKeysAndGetItem[_KT, _VT], /) -> None: ...
@overload
def __init__(
self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
dict: SupportsKeysAndGetItem[str, _VT],
/,
**kwargs: _VT,
) -> None: ...
@overload
def __init__(self, iterable: Iterable[tuple[_KT, _VT]], /) -> None: ...
@overload
def __init__(
self: UserDict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
iterable: Iterable[tuple[str, _VT]],
/,
**kwargs: _VT,
) -> None: ...
@overload
def __init__(self: UserDict[str, str], iterable: Iterable[list[str]], /) -> None: ...
@overload
def __init__(self: UserDict[bytes, bytes], iterable: Iterable[list[bytes]], /) -> None: ...
def __len__(self) -> int: ...
def __getitem__(self, key: _KT) -> _VT: ...
def __setitem__(self, key: _KT, item: _VT) -> None: ...
def __delitem__(self, key: _KT) -> None: ...
def __iter__(self) -> Iterator[_KT]: ...
def __contains__(self, key: object) -> bool: ...
def copy(self) -> Self: ...
def __copy__(self) -> Self: ...
# `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`.
# TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system.
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T], value: None = None) -> UserDict[_T, Any | None]: ...
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ...
if sys.version_info >= (3, 9):
@overload
def __or__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
@overload
def __or__(
self, other: UserDict[_T1, _T2] | dict[_T1, _T2]
) -> UserDict[_KT | _T1, _VT | _T2]: ...
@overload
def __ror__(self, other: UserDict[_KT, _VT] | dict[_KT, _VT]) -> Self: ...
@overload
def __ror__(
self, other: UserDict[_T1, _T2] | dict[_T1, _T2]
) -> UserDict[_KT | _T1, _VT | _T2]: ...
# UserDict.__ior__ should be kept roughly in line with MutableMapping.update()
@overload # type: ignore[misc]
def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...
@overload
def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ...
if sys.version_info >= (3, 12):
@overload
def get(self, key: _KT, default: None = None) -> _VT | None: ...
@overload
def get(self, key: _KT, default: _T) -> _VT | _T: ...
class UserList(MutableSequence[_T]):
data: list[_T]
@overload
def __init__(self, initlist: None = None) -> None: ...
@overload
def __init__(self, initlist: Iterable[_T]) -> None: ...
def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ...
def __le__(self, other: list[_T] | UserList[_T]) -> bool: ...
def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ...
def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ...
def __eq__(self, other: object) -> bool: ...
def __contains__(self, item: object) -> bool: ...
def __len__(self) -> int: ...
@overload
def __getitem__(self, i: SupportsIndex) -> _T: ...
@overload
def __getitem__(self, i: slice) -> Self: ...
@overload
def __setitem__(self, i: SupportsIndex, item: _T) -> None: ...
@overload
def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ...
def __delitem__(self, i: SupportsIndex | slice) -> None: ...
def __add__(self, other: Iterable[_T]) -> Self: ...
def __radd__(self, other: Iterable[_T]) -> Self: ...
def __iadd__(self, other: Iterable[_T]) -> Self: ...
def __mul__(self, n: int) -> Self: ...
def __rmul__(self, n: int) -> Self: ...
def __imul__(self, n: int) -> Self: ...
def append(self, item: _T) -> None: ...
def insert(self, i: int, item: _T) -> None: ...
def pop(self, i: int = -1) -> _T: ...
def remove(self, item: _T) -> None: ...
def copy(self) -> Self: ...
def __copy__(self) -> Self: ...
def count(self, item: _T) -> int: ...
# The runtime signature is "item, *args", and the arguments are then passed
# to `list.index`. In order to give more precise types, we pretend that the
# `item` argument is positional-only.
def index(
self, item: _T, start: SupportsIndex = 0, stop: SupportsIndex = sys.maxsize, /
) -> int: ...
# All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`.
@overload
def sort(
self: UserList[SupportsRichComparisonT], *, key: None = None, reverse: bool = False
) -> None: ...
@overload
def sort(
self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = False
) -> None: ...
def extend(self, other: Iterable[_T]) -> None: ...
class UserString(Sequence[UserString]):
data: str
def __init__(self, seq: object) -> None: ...
def __int__(self) -> int: ...
def __float__(self) -> float: ...
def __complex__(self) -> complex: ...
def __getnewargs__(self) -> tuple[str]: ...
def __lt__(self, string: str | UserString) -> bool: ...
def __le__(self, string: str | UserString) -> bool: ...
def __gt__(self, string: str | UserString) -> bool: ...
def __ge__(self, string: str | UserString) -> bool: ...
def __eq__(self, string: object) -> bool: ...
def __hash__(self) -> int: ...
def __contains__(self, char: object) -> bool: ...
def __len__(self) -> int: ...
def __getitem__(self, index: SupportsIndex | slice) -> Self: ...
def __iter__(self) -> Iterator[Self]: ...
def __reversed__(self) -> Iterator[Self]: ...
def __add__(self, other: object) -> Self: ...
def __radd__(self, other: object) -> Self: ...
def __mul__(self, n: int) -> Self: ...
def __rmul__(self, n: int) -> Self: ...
def __mod__(self, args: Any) -> Self: ...
def __rmod__(self, template: object) -> Self: ...
def capitalize(self) -> Self: ...
def casefold(self) -> Self: ...
def center(self, width: int, *args: Any) -> Self: ...
def count(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
def encode(
self: UserString, encoding: str | None = "utf-8", errors: str | None = "strict"
) -> bytes: ...
def endswith(
self, suffix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize
) -> bool: ...
def expandtabs(self, tabsize: int = 8) -> Self: ...
def find(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
def format(self, *args: Any, **kwds: Any) -> str: ...
def format_map(self, mapping: Mapping[str, Any]) -> str: ...
def index(self, sub: str, start: int = 0, end: int = sys.maxsize) -> int: ...
def isalpha(self) -> bool: ...
def isalnum(self) -> bool: ...
def isdecimal(self) -> bool: ...
def isdigit(self) -> bool: ...
def isidentifier(self) -> bool: ...
def islower(self) -> bool: ...
def isnumeric(self) -> bool: ...
def isprintable(self) -> bool: ...
def isspace(self) -> bool: ...
def istitle(self) -> bool: ...
def isupper(self) -> bool: ...
def isascii(self) -> bool: ...
def join(self, seq: Iterable[str]) -> str: ...
def ljust(self, width: int, *args: Any) -> Self: ...
def lower(self) -> Self: ...
def lstrip(self, chars: str | None = None) -> Self: ...
maketrans = str.maketrans
def partition(self, sep: str) -> tuple[str, str, str]: ...
if sys.version_info >= (3, 9):
def removeprefix(self, prefix: str | UserString, /) -> Self: ...
def removesuffix(self, suffix: str | UserString, /) -> Self: ...
def replace(
self, old: str | UserString, new: str | UserString, maxsplit: int = -1
) -> Self: ...
def rfind(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
def rindex(self, sub: str | UserString, start: int = 0, end: int = sys.maxsize) -> int: ...
def rjust(self, width: int, *args: Any) -> Self: ...
def rpartition(self, sep: str) -> tuple[str, str, str]: ...
def rstrip(self, chars: str | None = None) -> Self: ...
def split(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ...
def rsplit(self, sep: str | None = None, maxsplit: int = -1) -> list[str]: ...
def splitlines(self, keepends: bool = False) -> list[str]: ...
def startswith(
self, prefix: str | tuple[str, ...], start: int | None = 0, end: int | None = sys.maxsize
) -> bool: ...
def strip(self, chars: str | None = None) -> Self: ...
def swapcase(self) -> Self: ...
def title(self) -> Self: ...
def translate(self, *args: Any) -> Self: ...
def upper(self) -> Self: ...
def zfill(self, width: int) -> Self: ...
class deque(MutableSequence[_T]):
@property
def maxlen(self) -> int | None: ...
@overload
def __init__(self, *, maxlen: int | None = None) -> None: ...
@overload
def __init__(self, iterable: Iterable[_T], maxlen: int | None = None) -> None: ...
def append(self, x: _T, /) -> None: ...
def appendleft(self, x: _T, /) -> None: ...
def copy(self) -> Self: ...
def count(self, x: _T, /) -> int: ...
def extend(self, iterable: Iterable[_T], /) -> None: ...
def extendleft(self, iterable: Iterable[_T], /) -> None: ...
def insert(self, i: int, x: _T, /) -> None: ...
def index(self, x: _T, start: int = 0, stop: int = ..., /) -> int: ...
def pop(self) -> _T: ... # type: ignore[override]
def popleft(self) -> _T: ...
def remove(self, value: _T, /) -> None: ...
def rotate(self, n: int = 1, /) -> None: ...
def __copy__(self) -> Self: ...
def __len__(self) -> int: ...
# These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores
def __getitem__(self, key: SupportsIndex, /) -> _T: ... # type: ignore[override]
def __setitem__(self, key: SupportsIndex, value: _T, /) -> None: ... # type: ignore[override]
def __delitem__(self, key: SupportsIndex, /) -> None: ... # type: ignore[override]
def __contains__(self, key: object, /) -> bool: ...
def __reduce__(self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ...
def __iadd__(self, value: Iterable[_T], /) -> Self: ...
def __add__(self, value: Self, /) -> Self: ...
def __mul__(self, value: int, /) -> Self: ...
def __imul__(self, value: int, /) -> Self: ...
def __lt__(self, value: deque[_T], /) -> bool: ...
def __le__(self, value: deque[_T], /) -> bool: ...
def __gt__(self, value: deque[_T], /) -> bool: ...
def __ge__(self, value: deque[_T], /) -> bool: ...
def __eq__(self, value: object, /) -> bool: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any, /) -> GenericAlias: ...
class Counter(dict[_T, int], Generic[_T]):
@overload
def __init__(self, iterable: None = None, /) -> None: ...
@overload
def __init__(self: Counter[str], iterable: None = None, /, **kwargs: int) -> None: ...
@overload
def __init__(self, mapping: SupportsKeysAndGetItem[_T, int], /) -> None: ...
@overload
def __init__(self, iterable: Iterable[_T], /) -> None: ...
def copy(self) -> Self: ...
def elements(self) -> Iterator[_T]: ...
def most_common(self, n: int | None = None) -> list[tuple[_T, int]]: ...
@classmethod
def fromkeys(cls, iterable: Any, v: int | None = None) -> NoReturn: ... # type: ignore[override]
@overload
def subtract(self, iterable: None = None, /) -> None: ...
@overload
def subtract(self, mapping: Mapping[_T, int], /) -> None: ...
@overload
def subtract(self, iterable: Iterable[_T], /) -> None: ...
# Unlike dict.update(), use Mapping instead of SupportsKeysAndGetItem for the first overload
# (source code does an `isinstance(other, Mapping)` check)
#
# The second overload is also deliberately different to dict.update()
# (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`,
# the tuples would be added as keys, breaking type safety)
@overload # type: ignore[override]
def update(self, m: Mapping[_T, int], /, **kwargs: int) -> None: ...
@overload
def update(self, iterable: Iterable[_T], /, **kwargs: int) -> None: ...
@overload
def update(self, iterable: None = None, /, **kwargs: int) -> None: ...
def __missing__(self, key: _T) -> int: ...
def __delitem__(self, elem: object) -> None: ...
if sys.version_info >= (3, 10):
def __eq__(self, other: object) -> bool: ...
def __ne__(self, other: object) -> bool: ...
def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ...
def __sub__(self, other: Counter[_T]) -> Counter[_T]: ...
def __and__(self, other: Counter[_T]) -> Counter[_T]: ...
def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override]
def __pos__(self) -> Counter[_T]: ...
def __neg__(self) -> Counter[_T]: ...
# several type: ignores because __iadd__ is supposedly incompatible with __add__, etc.
def __iadd__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[misc]
def __isub__(self, other: SupportsItems[_T, int]) -> Self: ...
def __iand__(self, other: SupportsItems[_T, int]) -> Self: ...
def __ior__(self, other: SupportsItems[_T, int]) -> Self: ... # type: ignore[override,misc]
if sys.version_info >= (3, 10):
def total(self) -> int: ...
def __le__(self, other: Counter[Any]) -> bool: ...
def __lt__(self, other: Counter[Any]) -> bool: ...
def __ge__(self, other: Counter[Any]) -> bool: ...
def __gt__(self, other: Counter[Any]) -> bool: ...
# The pure-Python implementations of the "views" classes
# These are exposed at runtime in `collections/__init__.py`
class _OrderedDictKeysView(KeysView[_KT_co]):
def __reversed__(self) -> Iterator[_KT_co]: ...
class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co]):
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
class _OrderedDictValuesView(ValuesView[_VT_co]):
def __reversed__(self) -> Iterator[_VT_co]: ...
# The C implementations of the "views" classes
# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`,
# but they are not exposed anywhere)
# pyright doesn't have a specific error code for subclassing error!
@final
class _odict_keys(dict_keys[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __reversed__(self) -> Iterator[_KT_co]: ...
@final
class _odict_items(dict_items[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ...
@final
class _odict_values(dict_values[_KT_co, _VT_co]): # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
def __reversed__(self) -> Iterator[_VT_co]: ...
class OrderedDict(dict[_KT, _VT]):
def popitem(self, last: bool = True) -> tuple[_KT, _VT]: ...
def move_to_end(self, key: _KT, last: bool = True) -> None: ...
def copy(self) -> Self: ...
def __reversed__(self) -> Iterator[_KT]: ...
def keys(self) -> _odict_keys[_KT, _VT]: ...
def items(self) -> _odict_items[_KT, _VT]: ...
def values(self) -> _odict_values[_KT, _VT]: ...
# The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences.
# Like dict.fromkeys, its true signature is not expressible in the current type system.
# See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963.
@classmethod
@overload
def fromkeys(
cls, iterable: Iterable[_T], value: None = None
) -> OrderedDict[_T, Any | None]: ...
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ...
# Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.
@overload
def setdefault(
self: OrderedDict[_KT, _T | None], key: _KT, default: None = None
) -> _T | None: ...
@overload
def setdefault(self, key: _KT, default: _VT) -> _VT: ...
# Same as dict.pop, but accepts keyword arguments
@overload
def pop(self, key: _KT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _VT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _T) -> _VT | _T: ...
def __eq__(self, value: object, /) -> bool: ...
if sys.version_info >= (3, 9):
@overload
def __or__(self, value: dict[_KT, _VT], /) -> Self: ...
@overload
def __or__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ...
@overload
def __ror__(self, value: dict[_KT, _VT], /) -> Self: ...
@overload
def __ror__(self, value: dict[_T1, _T2], /) -> OrderedDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc]
class defaultdict(dict[_KT, _VT]):
default_factory: Callable[[], _VT] | None
@overload
def __init__(self) -> None: ...
@overload
def __init__(
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
**kwargs: _VT,
) -> None: ... # pyright: ignore[reportInvalidTypeVarUse] #11780
@overload
def __init__(self, default_factory: Callable[[], _VT] | None, /) -> None: ...
@overload
def __init__(
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
default_factory: Callable[[], _VT] | None,
/,
**kwargs: _VT,
) -> None: ...
@overload
def __init__(
self, default_factory: Callable[[], _VT] | None, map: SupportsKeysAndGetItem[_KT, _VT], /
) -> None: ...
@overload
def __init__(
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
default_factory: Callable[[], _VT] | None,
map: SupportsKeysAndGetItem[str, _VT],
/,
**kwargs: _VT,
) -> None: ...
@overload
def __init__(
self, default_factory: Callable[[], _VT] | None, iterable: Iterable[tuple[_KT, _VT]], /
) -> None: ...
@overload
def __init__(
self: defaultdict[str, _VT], # pyright: ignore[reportInvalidTypeVarUse] #11780
default_factory: Callable[[], _VT] | None,
iterable: Iterable[tuple[str, _VT]],
/,
**kwargs: _VT,
) -> None: ...
def __missing__(self, key: _KT, /) -> _VT: ...
def __copy__(self) -> Self: ...
def copy(self) -> Self: ...
if sys.version_info >= (3, 9):
@overload
def __or__(self, value: dict[_KT, _VT], /) -> Self: ...
@overload
def __or__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ...
@overload
def __ror__(self, value: dict[_KT, _VT], /) -> Self: ...
@overload
def __ror__(self, value: dict[_T1, _T2], /) -> defaultdict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc]
class ChainMap(MutableMapping[_KT, _VT]):
maps: list[MutableMapping[_KT, _VT]]
def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ...
def new_child(self, m: MutableMapping[_KT, _VT] | None = None) -> Self: ...
@property
def parents(self) -> Self: ...
def __setitem__(self, key: _KT, value: _VT) -> None: ...
def __delitem__(self, key: _KT) -> None: ...
def __getitem__(self, key: _KT) -> _VT: ...
def __iter__(self) -> Iterator[_KT]: ...
def __len__(self) -> int: ...
def __contains__(self, key: object) -> bool: ...
@overload
def get(self, key: _KT, default: None = None) -> _VT | None: ...
@overload
def get(self, key: _KT, default: _T) -> _VT | _T: ...
def __missing__(self, key: _KT) -> _VT: ... # undocumented
def __bool__(self) -> bool: ...
# Keep ChainMap.setdefault in line with MutableMapping.setdefault, modulo positional-only differences.
@overload
def setdefault(
self: ChainMap[_KT, _T | None], key: _KT, default: None = None
) -> _T | None: ...
@overload
def setdefault(self, key: _KT, default: _VT) -> _VT: ...
@overload
def pop(self, key: _KT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _VT) -> _VT: ...
@overload
def pop(self, key: _KT, default: _T) -> _VT | _T: ...
def copy(self) -> Self: ...
__copy__ = copy
# All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime,
# so the signature should be kept in line with `dict.fromkeys`.
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T]) -> ChainMap[_T, Any | None]: ...
@classmethod
@overload
# Special-case None: the user probably wants to add non-None values later.
def fromkeys(cls, iterable: Iterable[_T], value: None, /) -> ChainMap[_T, Any | None]: ...
@classmethod
@overload
def fromkeys(cls, iterable: Iterable[_T], value: _S, /) -> ChainMap[_T, _S]: ...
if sys.version_info >= (3, 9):
@overload
def __or__(self, other: Mapping[_KT, _VT]) -> Self: ...
@overload
def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
@overload
def __ror__(self, other: Mapping[_KT, _VT]) -> Self: ...
@overload
def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ...
# ChainMap.__ior__ should be kept roughly in line with MutableMapping.update()
@overload # type: ignore[misc]
def __ior__(self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ...
@overload
def __ior__(self, other: Iterable[tuple[_KT, _VT]]) -> Self: ...


@@ -0,0 +1,3 @@
from _collections_abc import *
# from _collections_abc import __all__ as __all__


@@ -0,0 +1,87 @@
"""
Type aliases for the Micropython specific modes used in the `open` function.
References:
- https://docs.micropython.org/en/latest/library/io.html#conceptual-hierarchy
- https://docs.python.org/3/library/io.html
"""
# MIT License
# Howard C Lovatt, 2020 onwards.
# Jos Verlinde, 2025 onwards.
from typing import Literal
from typing_extensions import TypeAlias
_OpenTextModeUpdating: TypeAlias = Literal[
"r+",
"+r",
"rt+",
"r+t",
"+rt",
"tr+",
"t+r",
"+tr",
"w+",
"+w",
"wt+",
"w+t",
"+wt",
"tw+",
"t+w",
"+tw",
"a+",
"+a",
"at+",
"a+t",
"+at",
"ta+",
"t+a",
"+ta",
"x+",
"+x",
"xt+",
"x+t",
"+xt",
"tx+",
"t+x",
"+tx",
]
_OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"]
_OpenTextModeReading: TypeAlias = Literal[
"r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"
]
_OpenTextMode: TypeAlias = _OpenTextModeUpdating | _OpenTextModeWriting | _OpenTextModeReading
_OpenBinaryModeUpdating: TypeAlias = Literal[
"rb+",
"r+b",
"+rb",
"br+",
"b+r",
"+br",
"wb+",
"w+b",
"+wb",
"bw+",
"b+w",
"+bw",
"ab+",
"a+b",
"+ab",
"ba+",
"b+a",
"+ba",
"xb+",
"x+b",
"+xb",
"bx+",
"b+x",
"+bx",
]
_OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"]
_OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"]
_OpenBinaryMode: TypeAlias = (
_OpenBinaryModeUpdating | _OpenBinaryModeReading | _OpenBinaryModeWriting
)
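
A small, hypothetical example of what these Literal aliases buy during type checking (the function name is made up):

# Hypothetical use of the mode aliases: the checker narrows the accepted strings.
def open_text(path: str, mode: _OpenTextMode = "r") -> None: ...

open_text("boot.py", "rt")   # accepted: "rt" is an _OpenTextModeReading literal
open_text("boot.py", "rb")   # flagged by pyright/mypy: "rb" is a binary mode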


@@ -0,0 +1,52 @@
# ------------------
# from typeshed/stdlib/io.pyi
import abc
from types import TracebackType
from typing import TypeVar
from _io import BytesIO as BytesIO
from _io import FileIO as FileIO
from _io import IncrementalNewlineDecoder as IncrementalNewlineDecoder
from _io import StringIO as StringIO
from _io import TextIOWrapper as TextIOWrapper
from _io import _BufferedIOBase, _IOBase, _RawIOBase, _TextIOBase
from _io import open as open
from typing_extensions import Self
from .buffer_mp import AnyReadableBuf, AnyWritableBuf
from .pathlike import PathLike
class IOBase_mp(_IOBase, metaclass=abc.ABCMeta): ...
# class IOBase_mp(Stream, metaclass=abc.ABCMeta): ...
# Andy
#
# class Stream(metaclass=abc.ABCMeta):
# """
# MicroPython stream "base class". Due to implementation mechanism
# not all methods are guaranteed to be available on all classes
# based on the stream type / protocol.
# """
# def __init__(self, *argv, **kwargs) -> None: ...
# def __enter__(self: Self) -> Self: ...
# def __exit__(
# self,
# exc_type: type[BaseException] | None,
# exc_val: BaseException | None,
# exc_tb: TracebackType | None,
# ) -> None: ...
# def close(self) -> None: ...
# def flush(self) -> None: ...
# def read(self, __size: int | None = ...) -> bytes: ...
# def read1(self, __size: int = ...) -> bytes: ...
# def readinto(self, __buffer: AnyWritableBuf) -> int: ...
# def readline(self, __size: int | None = ...) -> bytes: ...
# def readlines(self, __hint: int = ...) -> list[bytes]: ...
# def seek(self, __offset: int, __whence: int = ...) -> int: ...
# def tell(self) -> int: ...
# def write(self, __buffer: AnyReadableBuf) -> int: ...
# def write1(self, __buffer: AnyReadableBuf) -> int: ...
# Howard
_OpenFile = TypeVar("_OpenFile", str, bytes, PathLike[str], PathLike[bytes], int)

View File

@@ -0,0 +1,32 @@
"""
Decorator to annotate objects with the MicroPython ports they are available on.
Usage:
    @mp_available(port="esp32")
    def foo(): ...
    @mp_available(port=["esp32", "rp2"])
    class Bar: ...
"""
from typing import Callable, Iterable, TypeVar, Union
__all__ = ["mp_available"]
T = TypeVar("T") # Works for functions, classes, and other callables
def mp_available(
*,
    port: Union[str, Iterable[str]] = ["*"],
    version: Union[str, Iterable[str]] = ["*"],
    macro: Union[str, Iterable[str]] = ["*"],
) -> Callable[[T], T]:
"""
Decorator factory that marks an object as available on the given MicroPython ports.
The ports list is stored on the decorated object as __mp_available_ports__.
"""
def decorator(obj: T) -> T:
return obj
return decorator
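# Hedged sketch (not part of the shipped stub): if the __mp_available_ports__
# metadata mentioned in the docstring were recorded at runtime, the inner
# decorator could be written roughly as below. The shipped implementation
# intentionally returns the object unchanged; any filtering by port, version
# or macro is assumed to happen in the stub-generation tooling instead.
#
#     def decorator(obj: T) -> T:
#         obj.__mp_available_ports__ = [port] if isinstance(port, str) else list(port)
#         return obj
#     return decorator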

View File

@@ -0,0 +1,24 @@
"""
MicroPython version of the sys.implementation object
"""
from typing import Any, Tuple
class _mp_implementation():
"""
This object is the recommended way to distinguish MicroPython from other Python implementations (note that it still may not exist in the very minimal ports).
    Starting with version 1.22.0-preview, the fourth element, releaselevel, of implementation.version is either an empty string or "preview".
"""
name: str
    version: Tuple[int, int, int, str]
_machine: str
"string describing the underlying machine"
_mpy: int
"supported mpy file-format version (optional attribute)"
_build: str
"string that can help identify the configuration that MicroPython was built with"
# Define __getattr__, as the documentation states:
# > sys.implementation may contain additional attributes specific to the Python implementation.
# > These non-standard attributes must start with an underscore, and are not described here.
def __getattr__(self, name: str) -> Any: ...
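# Illustrative check (the version numbers are examples only; tuple comparison
# also works when the 4-element MicroPython version is compared to a 3-tuple):
#
#     import sys
#     if sys.implementation.name == "micropython":
#         if sys.implementation.version >= (1, 22, 0):
#             ...  # safe to rely on features introduced in 1.22.0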

View File

@@ -0,0 +1,26 @@
###########################
# neopixel
from typing import Tuple
class _NeoPixelBase:
"""
    A class that adds a few missing methods to the NeoPixel class.
"""
def __len__(self) -> int:
"""
Returns the number of LEDs in the strip.
"""
...
def __setitem__(self, index: int, val, /) -> None:
"""
Set the pixel at *index* to the value, which is an RGB/RGBW tuple.
"""
...
def __getitem__(self, index: int, /) -> Tuple:
"""
Returns the pixel at *index* as an RGB/RGBW tuple.
"""
...

View File

@@ -0,0 +1,17 @@
"""
pathlike is used in multiple stdlib stubs, but does not exist in MicroPython.
Copied from typeshed/stdlib/os.pyi, as os.pyi cannot import from a module with the same name.
"""
import abc
from typing import Protocol, Tuple, runtime_checkable
from _typeshed import AnyStr_co
# mypy and pyright object to this being both ABC and Protocol.
# At runtime it inherits from ABC and is not a Protocol, but it will be
# on the allowlist for use as a Protocol starting in 3.14.
@runtime_checkable
class PathLike(abc.ABC, Protocol[AnyStr_co]):  # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues]
@abc.abstractmethod
def __fspath__(self) -> AnyStr_co: ...

View File

@@ -0,0 +1,17 @@
from typing import Protocol, TypeVar, runtime_checkable
_T_Co = TypeVar("_T_Co", covariant=True)
@runtime_checkable
class Subscriptable(Protocol[_T_Co]):
"""A `Protocol` (structurally typed) for an object that is subscriptable and of finite length."""
__slots__ = ()
def __len__(self) -> int:
"""Number of elements, normally called via `len(x)` where `x` is an object that implements this protocol."""
def __getitem__(self, index: int) -> _T_Co:
"""
Element at the given index,
normally called via `x[index]` where `x` is an object that implements this protocol.
"""

View File

@@ -0,0 +1,15 @@
"""
The tuple to pass or receive from the time methods is unfortunately
defined differently on different ports, boards and versions of MicroPython.
The _Time8Tuple and _Time9Tuple are the most common ones, and are unified in the _TimeTuple.
As this still does not cover all cases, the _TimeTuple is a union of the two common cases and the generic Tuple.
"""
from typing import Tuple
from typing_extensions import TypeAlias
_Time8Tuple: TypeAlias = Tuple[int, int, int, int, int, int, int, int]
_Time9Tuple: TypeAlias = Tuple[int, int, int, int, int, int, int, int, int]
_TimeTuple: TypeAlias = _Time8Tuple | _Time9Tuple | Tuple[int, ...]
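# Illustrative use of the alias (a hypothetical signature, shown only to make
# the intent concrete; the real signatures live in the time/machine stubs):
#
#     def localtime(secs: int | None = None, /) -> _TimeTuple: ...
#
# An 8-tuple port returns (year, month, mday, hour, minute, second, weekday, yearday);
# a 9-tuple port appends one extra, port-dependent field.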

View File

@@ -0,0 +1,15 @@
"""
Module: '_onewire' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
def reset(*args, **kwargs) -> Incomplete: ...
def writebyte(*args, **kwargs) -> Incomplete: ...
def writebit(*args, **kwargs) -> Incomplete: ...
def crc8(*args, **kwargs) -> Incomplete: ...
def readbyte(*args, **kwargs) -> Incomplete: ...
def readbit(*args, **kwargs) -> Incomplete: ...

View File

@@ -0,0 +1,33 @@
"""
Multithreading support.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/_thread.html
CPython module: :mod:`python:_thread` https://docs.python.org/3/library/_thread.html .
This module implements multithreading support.
This module is highly experimental and its API is not yet fully settled
and not yet described in this documentation.
---
Module: '_thread' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing_extensions import Awaitable, TypeAlias, TypeVar
def get_ident(*args, **kwargs) -> Incomplete: ...
def start_new_thread(*args, **kwargs) -> Incomplete: ...
def stack_size(*args, **kwargs) -> Incomplete: ...
def exit(*args, **kwargs) -> Incomplete: ...
def allocate_lock(*args, **kwargs) -> Incomplete: ...
class LockType:
def locked(self, *args, **kwargs) -> Incomplete: ...
def release(self, *args, **kwargs) -> Incomplete: ...
def acquire(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...

View File

@@ -0,0 +1,61 @@
"""
Binary/ASCII conversions.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/binascii.html
CPython module: :mod:`python:binascii` https://docs.python.org/3/library/binascii.html .
This module implements conversions between binary data and various
encodings of it in ASCII form (in both directions).
---
Module: 'binascii' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing import Any, Optional
from typing_extensions import Awaitable, TypeAlias, TypeVar
def crc32(data, value: Optional[Any] = None) -> Incomplete:
"""
Compute CRC-32, the 32-bit checksum of *data*, starting with an initial CRC
of *value*. The default initial CRC is zero. The algorithm is consistent
with the ZIP file checksum.
"""
...
def hexlify(data: bytes, sep: str | bytes = ..., /) -> bytes:
"""
Convert the bytes in the *data* object to a hexadecimal representation.
Returns a bytes object.
If the additional argument *sep* is supplied it is used as a separator
between hexadecimal values.
"""
...
def unhexlify(data: str | bytes, /) -> bytes:
"""
Convert hexadecimal data to binary representation. Returns bytes string.
(i.e. inverse of hexlify)
"""
...
def b2a_base64(data: bytes, /) -> bytes:
"""
Encode binary data in base64 format, as in `RFC 3548
<https://tools.ietf.org/html/rfc3548.html>`_. Returns the encoded data
followed by a newline character if newline is true, as a bytes object.
"""
...
def a2b_base64(data: str | bytes, /) -> bytes:
"""
Decode base64-encoded data, ignoring invalid characters in the input.
Conforms to `RFC 2045 s.6.8 <https://tools.ietf.org/html/rfc2045#section-6.8>`_.
Returns a bytes object.
"""
...
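# Illustrative round-trip (outputs shown are what these inputs produce; the
# separator argument of hexlify requires a port/firmware that supports it):
#
#     hexlify(b"\x12\xab")          # -> b'12ab'
#     hexlify(b"\x12\xab", ":")     # -> b'12:ab'
#     unhexlify("12ab")             # -> b'\x12\xab'
#     b2a_base64(b"hi")             # -> b'aGk=\n'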

View File

@@ -0,0 +1,82 @@
"""
Mathematical functions for complex numbers.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/cmath.html
CPython module: :mod:`python:cmath` https://docs.python.org/3/library/cmath.html .
The ``cmath`` module provides some basic mathematical functions for
working with complex numbers.
Availability: not available on WiPy and ESP8266. Floating point support
required for this module.
---
Module: 'cmath' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing import SupportsComplex, SupportsFloat, SupportsIndex, Tuple
from typing_extensions import Awaitable, TypeAlias, TypeVar
_C: TypeAlias = SupportsFloat | SupportsComplex | SupportsIndex | complex
e: float = 2.7182818
pi: float = 3.1415928
def polar(z: _C, /) -> Tuple[float, float]:
"""
Returns, as a tuple, the polar form of ``z``.
"""
...
def sqrt(z: _C, /) -> complex:
"""
Return the square-root of ``z``.
"""
...
def rect(r: float, phi: float, /) -> complex:
"""
Returns the complex number with modulus ``r`` and phase ``phi``.
"""
...
def sin(z: _C, /) -> complex:
"""
Return the sine of ``z``.
"""
...
def exp(z: _C, /) -> complex:
"""
Return the exponential of ``z``.
"""
...
def cos(z: _C, /) -> complex:
"""
Return the cosine of ``z``.
"""
...
def phase(z: _C, /) -> float:
"""
Returns the phase of the number ``z``, in the range (-pi, +pi].
"""
...
def log(z: _C, /) -> complex:
"""
Return the natural logarithm of ``z``. The branch cut is along the negative real axis.
"""
...
def log10(z: _C, /) -> complex:
"""
Return the base-10 logarithm of ``z``. The branch cut is along the negative real axis.
"""
...
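# Illustrative round-trip between rectangular and polar form (approximate
# single-precision output shown; assumes a port with complex-number support):
#
#     r, phi = polar(1 + 1j)    # -> (1.4142135, 0.7853982)
#     rect(r, phi)              # -> (1+1j), up to rounding
#     sqrt(-1 + 0j)             # -> 1j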

View File

@@ -0,0 +1,165 @@
"""
Cryptographic ciphers.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/cryptolib.html
---
Module: 'cryptolib' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from _mpy_shed import AnyReadableBuf, AnyWritableBuf
from typing import overload
from typing_extensions import Awaitable, TypeAlias, TypeVar
class aes:
"""
.. class:: aes
"""
@overload
def encrypt(self, in_buf: AnyReadableBuf, /) -> bytes:
"""
Encrypt *in_buf*. If no *out_buf* is given result is returned as a
newly allocated `bytes` object. Otherwise, result is written into
mutable buffer *out_buf*. *in_buf* and *out_buf* can also refer
to the same mutable buffer, in which case data is encrypted in-place.
"""
@overload
def encrypt(self, in_buf: AnyReadableBuf, out_buf: AnyWritableBuf, /) -> None:
"""
Encrypt *in_buf*. If no *out_buf* is given result is returned as a
newly allocated `bytes` object. Otherwise, result is written into
mutable buffer *out_buf*. *in_buf* and *out_buf* can also refer
to the same mutable buffer, in which case data is encrypted in-place.
"""
@overload
def decrypt(self, in_buf: AnyReadableBuf, /) -> bytes:
"""
Like `encrypt()`, but for decryption.
"""
@overload
def decrypt(self, in_buf: AnyReadableBuf, out_buf: AnyWritableBuf, /) -> None:
"""
Like `encrypt()`, but for decryption.
"""
@overload
def __init__(self, key: AnyReadableBuf, mode: int, /):
"""
Initialize cipher object, suitable for encryption/decryption. Note:
        after initialization, the cipher object can be used only for either
encryption or decryption. Running decrypt() operation after encrypt()
or vice versa is not supported.
Parameters are:
* *key* is an encryption/decryption key (bytes-like).
* *mode* is:
* ``1`` (or ``cryptolib.MODE_ECB`` if it exists) for Electronic Code Book (ECB).
* ``2`` (or ``cryptolib.MODE_CBC`` if it exists) for Cipher Block Chaining (CBC).
* ``6`` (or ``cryptolib.MODE_CTR`` if it exists) for Counter mode (CTR).
* *IV* is an initialization vector for CBC mode.
* For Counter mode, *IV* is the initial value for the counter.
"""
@overload
def __init__(self, key: AnyReadableBuf, mode: int, IV: AnyReadableBuf, /):
"""
Initialize cipher object, suitable for encryption/decryption. Note:
        after initialization, the cipher object can be used only for either
encryption or decryption. Running decrypt() operation after encrypt()
or vice versa is not supported.
Parameters are:
* *key* is an encryption/decryption key (bytes-like).
* *mode* is:
* ``1`` (or ``cryptolib.MODE_ECB`` if it exists) for Electronic Code Book (ECB).
* ``2`` (or ``cryptolib.MODE_CBC`` if it exists) for Cipher Block Chaining (CBC).
* ``6`` (or ``cryptolib.MODE_CTR`` if it exists) for Counter mode (CTR).
* *IV* is an initialization vector for CBC mode.
* For Counter mode, *IV* is the initial value for the counter.
"""

View File

@@ -0,0 +1,85 @@
"""
Deflate compression & decompression.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/deflate.html
This module allows compression and decompression of binary data with the
`DEFLATE algorithm <https://en.wikipedia.org/wiki/DEFLATE>`_
(commonly used in the zlib library and gzip archiver).
**Availability:**
* Added in MicroPython v1.21.
* Decompression: Enabled via the ``MICROPY_PY_DEFLATE`` build option, on by default
on ports with the "extra features" level or higher (which is most boards).
* Compression: Enabled via the ``MICROPY_PY_DEFLATE_COMPRESS`` build option, on
by default on ports with the "full features" level or higher (generally this means
you need to build your own firmware to enable this).
---
Module: 'deflate' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from typing import Final
from _typeshed import Incomplete
from typing_extensions import Awaitable, TypeAlias, TypeVar
GZIP: Final[int] = 3
RAW: Final[int] = 1
ZLIB: Final[int] = 2
AUTO: Final[int] = 0
class DeflateIO:
"""
This class can be used to wrap a *stream* which is any
:term:`stream-like <stream>` object such as a file, socket, or stream
(including :class:`io.BytesIO`). It is itself a stream and implements the
standard read/readinto/write/close methods.
The *stream* must be a blocking stream. Non-blocking streams are currently
not supported.
The *format* can be set to any of the constants defined below, and defaults
to ``AUTO`` which for decompressing will auto-detect gzip or zlib streams,
and for compressing it will generate a raw stream.
The *wbits* parameter sets the base-2 logarithm of the DEFLATE dictionary
window size. So for example, setting *wbits* to ``10`` sets the window size
to 1024 bytes. Valid values are ``5`` to ``15`` inclusive (corresponding to
window sizes of 32 to 32k bytes).
If *wbits* is set to ``0`` (the default), then for compression a window size
of 256 bytes will be used (as if *wbits* was set to 8). For decompression, it
depends on the format:
* ``RAW`` will use 256 bytes (corresponding to *wbits* set to 8).
* ``ZLIB`` (or ``AUTO`` with zlib detected) will use the value from the zlib
header.
* ``GZIP`` (or ``AUTO`` with gzip detected) will use 32 kilobytes
(corresponding to *wbits* set to 15).
See the :ref:`window size <deflate_wbits>` notes below for more information
about the window size, zlib, and gzip streams.
If *close* is set to ``True`` then the underlying stream will be closed
automatically when the :class:`deflate.DeflateIO` stream is closed. This is
useful if you want to return a :class:`deflate.DeflateIO` stream that wraps
another stream and not have the caller need to know about managing the
underlying stream.
If compression is enabled, a given :class:`deflate.DeflateIO` instance
supports both reading and writing. For example, a bidirectional stream like
a socket can be wrapped, which allows for compression/decompression in both
directions.
"""
def readline(self, *args, **kwargs) -> Incomplete: ...
def readinto(self, *args, **kwargs) -> Incomplete: ...
def read(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, stream, format=AUTO, wbits=0, close=False, /) -> None: ...
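# Illustrative in-memory decompression (assumes the decompression option is
# enabled on the port, per the availability notes above; `compressed` is a
# placeholder for zlib-compressed bytes obtained elsewhere):
#
#     import io
#     d = DeflateIO(io.BytesIO(compressed), ZLIB)
#     data = d.read()
#     d.close()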

View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
class DHTBase:
pin: Incomplete
buf: Incomplete
def __init__(self, pin) -> None: ...
def measure(self) -> None: ...
class DHT11(DHTBase):
def humidity(self): ...
def temperature(self): ...
class DHT22(DHTBase):
def humidity(self): ...
def temperature(self): ...

View File

@@ -0,0 +1,16 @@
from _typeshed import Incomplete
from micropython import const as const
_CONVERT: int
_RD_SCRATCH: int
_WR_SCRATCH: int
class DS18X20:
ow: Incomplete
buf: Incomplete
def __init__(self, onewire) -> None: ...
def scan(self): ...
def convert_temp(self) -> None: ...
def read_scratch(self, rom): ...
def write_scratch(self, rom, buf) -> None: ...
def read_temp(self, rom): ...

View File

@@ -0,0 +1,45 @@
"""
System error codes.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/errno.html
CPython module: :mod:`python:errno` https://docs.python.org/3/library/errno.html .
This module provides access to symbolic error codes for the `OSError` exception.
The particular inventory of codes depends on the :term:`MicroPython port`.
---
Module: 'errno' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from typing import Dict, Final
from _typeshed import Incomplete
from typing_extensions import Awaitable, TypeAlias, TypeVar
ENOBUFS: Final[int] = 105
ENODEV: Final[int] = 19
ENOENT: Final[int] = 2
EISDIR: Final[int] = 21
EIO: Final[int] = 5
EINVAL: Final[int] = 22
EPERM: Final[int] = 1
ETIMEDOUT: Final[int] = 110
ENOMEM: Final[int] = 12
EOPNOTSUPP: Final[int] = 95
ENOTCONN: Final[int] = 107
errorcode: dict = {}
EAGAIN: Final[int] = 11
EALREADY: Final[int] = 114
EBADF: Final[int] = 9
EADDRINUSE: Final[int] = 98
EACCES: Final[int] = 13
EINPROGRESS: Final[int] = 115
EEXIST: Final[int] = 17
EHOSTUNREACH: Final[int] = 113
ECONNABORTED: Final[int] = 103
ECONNRESET: Final[int] = 104
ECONNREFUSED: Final[int] = 111
ENOTSUP: Final[int] = ...
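# Illustrative use ("missing.txt" is a placeholder; note that `errorcode` was
# captured as an empty dict on this board, so reverse lookups may be unavailable):
#
#     import os
#     try:
#         os.stat("missing.txt")
#     except OSError as exc:
#         if exc.args[0] == ENOENT:
#             print("file not found")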

View File

@@ -0,0 +1,227 @@
"""
Frame buffer manipulation.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/framebuf.html
This module provides a general frame buffer which can be used to create
bitmap images, which can then be sent to a display.
---
Module: 'framebuf' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from typing import Any, Optional, overload, Final
from _typeshed import Incomplete
from _mpy_shed import AnyReadableBuf, AnyWritableBuf
from typing_extensions import Awaitable, TypeAlias, TypeVar
MONO_HMSB: Final[int] = 4
MONO_HLSB: Final[int] = 3
RGB565: Final[int] = 1
MONO_VLSB: Final[int] = 0
MVLSB: Final[int] = 0
GS2_HMSB: Final[int] = 5
GS8: Final[int] = 6
GS4_HMSB: Final[int] = 2
def FrameBuffer1(*args, **kwargs) -> Incomplete: ...
class FrameBuffer:
"""
The FrameBuffer class provides a pixel buffer which can be drawn upon with
pixels, lines, rectangles, text and even other FrameBuffer's. It is useful
when generating output for displays.
For example::
import framebuf
# FrameBuffer needs 2 bytes for every RGB565 pixel
fbuf = framebuf.FrameBuffer(bytearray(100 * 10 * 2), 100, 10, framebuf.RGB565)
fbuf.fill(0)
fbuf.text('MicroPython!', 0, 0, 0xffff)
fbuf.hline(0, 9, 96, 0xffff)
"""
def poly(self, x, y, coords, c, f: Optional[Any] = None) -> Incomplete:
"""
Given a list of coordinates, draw an arbitrary (convex or concave) closed
polygon at the given x, y location using the given color.
The *coords* must be specified as a :mod:`array` of integers, e.g.
``array('h', [x0, y0, x1, y1, ... xn, yn])``.
The optional *f* parameter can be set to ``True`` to fill the polygon.
Otherwise just a one pixel outline is drawn.
"""
...
def vline(self, x: int, y: int, h: int, c: int, /) -> None:
"""
Draw a line from a set of coordinates using the given color and
a thickness of 1 pixel. The `line` method draws the line up to
a second set of coordinates whereas the `hline` and `vline`
methods draw horizontal and vertical lines respectively up to
a given length.
"""
@overload
def pixel(self, x: int, y: int, /) -> int:
"""
If *c* is not given, get the color value of the specified pixel.
If *c* is given, set the specified pixel to the given color.
"""
@overload
def pixel(self, x: int, y: int, c: int, /) -> None:
"""
If *c* is not given, get the color value of the specified pixel.
If *c* is given, set the specified pixel to the given color.
"""
def text(self, s: str, x: int, y: int, c: int = 1, /) -> None:
"""
Write text to the FrameBuffer using the coordinates as the upper-left
corner of the text. The color of the text can be defined by the optional
argument but is otherwise a default value of 1. All characters have
dimensions of 8x8 pixels and there is currently no way to change the font.
"""
...
    def rect(self, x: int, y: int, w: int, h: int, c: int, f: bool = False, /) -> None:
"""
Draw a rectangle at the given location, size and color.
The optional *f* parameter can be set to ``True`` to fill the rectangle.
Otherwise just a one pixel outline is drawn.
"""
...
def scroll(self, xstep: int, ystep: int, /) -> None:
"""
Shift the contents of the FrameBuffer by the given vector. This may
leave a footprint of the previous colors in the FrameBuffer.
"""
...
def ellipse(self, x, y, xr, yr, c, f, m: Optional[Any] = None) -> None:
"""
Draw an ellipse at the given location. Radii *xr* and *yr* define the
geometry; equal values cause a circle to be drawn. The *c* parameter
defines the color.
The optional *f* parameter can be set to ``True`` to fill the ellipse.
Otherwise just a one pixel outline is drawn.
The optional *m* parameter enables drawing to be restricted to certain
quadrants of the ellipse. The LS four bits determine which quadrants are
to be drawn, with bit 0 specifying Q1, b1 Q2, b2 Q3 and b3 Q4. Quadrants
are numbered counterclockwise with Q1 being top right.
"""
...
def line(self, x1: int, y1: int, x2: int, y2: int, c: int, /) -> None:
"""
Draw a line from a set of coordinates using the given color and
a thickness of 1 pixel. The `line` method draws the line up to
a second set of coordinates whereas the `hline` and `vline`
methods draw horizontal and vertical lines respectively up to
a given length.
"""
...
def blit(
self,
fbuf: FrameBuffer,
x: int,
y: int,
key: int = -1,
palette: Optional[bytes] = None,
/,
) -> None:
"""
Draw another FrameBuffer on top of the current one at the given coordinates.
If *key* is specified then it should be a color integer and the
corresponding color will be considered transparent: all pixels with that
color value will not be drawn. (If the *palette* is specified then the *key*
is compared to the value from *palette*, not to the value directly from
*fbuf*.)
*fbuf* can be another FrameBuffer instance, or a tuple or list of the form::
(buffer, width, height, format)
or::
(buffer, width, height, format, stride)
This matches the signature of the FrameBuffer constructor, and the elements
of the tuple/list are the same as the arguments to the constructor except that
the *buffer* here can be read-only.
The *palette* argument enables blitting between FrameBuffers with differing
formats. Typical usage is to render a monochrome or grayscale glyph/icon to
a color display. The *palette* is a FrameBuffer instance whose format is
that of the current FrameBuffer. The *palette* height is one pixel and its
pixel width is the number of colors in the source FrameBuffer. The *palette*
for an N-bit source needs 2**N pixels; the *palette* for a monochrome source
would have 2 pixels representing background and foreground colors. The
application assigns a color to each pixel in the *palette*. The color of the
current pixel will be that of that *palette* pixel whose x position is the
color of the corresponding source pixel.
"""
...
def hline(self, x: int, y: int, w: int, c: int, /) -> None:
"""
Draw a line from a set of coordinates using the given color and
a thickness of 1 pixel. The `line` method draws the line up to
a second set of coordinates whereas the `hline` and `vline`
methods draw horizontal and vertical lines respectively up to
a given length.
"""
def fill(self, c: int, /) -> None:
"""
Fill the entire FrameBuffer with the specified color.
"""
...
def fill_rect(self, *args, **kwargs) -> Incomplete: ...
def __init__(
self,
buffer: AnyWritableBuf,
width: int,
height: int,
format: int,
stride: int = ...,
/,
) -> None:
"""
Construct a FrameBuffer object. The parameters are:
- *buffer* is an object with a buffer protocol which must be large
enough to contain every pixel defined by the width, height and
format of the FrameBuffer.
- *width* is the width of the FrameBuffer in pixels
- *height* is the height of the FrameBuffer in pixels
- *format* specifies the type of pixel used in the FrameBuffer;
permissible values are listed under Constants below. These set the
number of bits used to encode a color value and the layout of these
bits in *buffer*.
Where a color value c is passed to a method, c is a small integer
with an encoding that is dependent on the format of the FrameBuffer.
- *stride* is the number of pixels between each horizontal line
of pixels in the FrameBuffer. This defaults to *width* but may
need adjustments when implementing a FrameBuffer within another
larger FrameBuffer or screen. The *buffer* size must accommodate
an increased step size.
One must specify valid *buffer*, *width*, *height*, *format* and
optionally *stride*. Invalid *buffer* size or dimensions may lead to
unexpected errors.
"""

View File

@@ -0,0 +1,112 @@
"""
Control the garbage collector.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/gc.html
CPython module: :mod:`python:gc` https://docs.python.org/3/library/gc.html .
---
Module: 'gc' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing import overload
from typing_extensions import Awaitable, TypeAlias, TypeVar
def mem_alloc() -> int:
"""
Return the number of bytes of heap RAM that are allocated by Python code.
Admonition:Difference to CPython
:class: attention
    This function is a MicroPython extension.
"""
...
def isenabled(*args, **kwargs) -> Incomplete: ...
def mem_free() -> int:
"""
Return the number of bytes of heap RAM that is available for Python
code to allocate, or -1 if this amount is not known.
Admonition:Difference to CPython
:class: attention
    This function is a MicroPython extension.
"""
...
@overload
def threshold() -> int:
"""
Set or query the additional GC allocation threshold. Normally, a collection
is triggered only when a new allocation cannot be satisfied, i.e. on an
out-of-memory (OOM) condition. If this function is called, in addition to
OOM, a collection will be triggered each time after *amount* bytes have been
allocated (in total, since the previous time such an amount of bytes
have been allocated). *amount* is usually specified as less than the
full heap size, with the intention to trigger a collection earlier than when the
heap becomes exhausted, and in the hope that an early collection will prevent
excessive memory fragmentation. This is a heuristic measure, the effect
of which will vary from application to application, as well as
the optimal value of the *amount* parameter.
Calling the function without argument will return the current value of
the threshold. A value of -1 means a disabled allocation threshold.
Admonition:Difference to CPython
:class: attention
This function is a MicroPython extension. CPython has a similar
function - ``set_threshold()``, but due to different GC
implementations, its signature and semantics are different.
"""
@overload
def threshold(amount: int) -> None:
"""
Set or query the additional GC allocation threshold. Normally, a collection
is triggered only when a new allocation cannot be satisfied, i.e. on an
out-of-memory (OOM) condition. If this function is called, in addition to
OOM, a collection will be triggered each time after *amount* bytes have been
allocated (in total, since the previous time such an amount of bytes
have been allocated). *amount* is usually specified as less than the
full heap size, with the intention to trigger a collection earlier than when the
heap becomes exhausted, and in the hope that an early collection will prevent
excessive memory fragmentation. This is a heuristic measure, the effect
of which will vary from application to application, as well as
the optimal value of the *amount* parameter.
Calling the function without argument will return the current value of
the threshold. A value of -1 means a disabled allocation threshold.
Admonition:Difference to CPython
:class: attention
This function is a MicroPython extension. CPython has a similar
function - ``set_threshold()``, but due to different GC
implementations, its signature and semantics are different.
"""
def collect() -> None:
"""
Run a garbage collection.
"""
...
def enable() -> None:
"""
Enable automatic garbage collection.
"""
...
def disable() -> None:
"""
Disable automatic garbage collection. Heap memory can still be allocated,
and garbage collection can still be initiated manually using :meth:`gc.collect`.
"""
...
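# Illustrative use of the threshold heuristic described above (the "quarter of
# the currently free heap" policy is only an example, not a recommendation):
#
#     import gc
#     gc.collect()
#     gc.threshold(gc.mem_free() // 4 + gc.mem_alloc())  # collect a bit earlier than OOM
#     gc.threshold()                                      # -> the value just set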

View File

@@ -0,0 +1,104 @@
"""
Hashing algorithms.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/hashlib.html
CPython module: :mod:`python:hashlib` https://docs.python.org/3/library/hashlib.html .
This module implements binary data hashing algorithms. The exact inventory
of available algorithms depends on a board. Among the algorithms which may
be implemented:
* SHA256 - The current generation, modern hashing algorithm (of SHA2 series).
It is suitable for cryptographically-secure purposes. Included in the
MicroPython core and any board is recommended to provide this, unless
it has particular code size constraints.
* SHA1 - A previous generation algorithm. Not recommended for new usages,
  but SHA1 is part of a number of Internet standards and existing
applications, so boards targeting network connectivity and
interoperability will try to provide this.
* MD5 - A legacy algorithm, not considered cryptographically secure. Only
selected boards, targeting interoperability with legacy applications,
will offer this.
---
Module: 'hashlib' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from _mpy_shed import AnyReadableBuf, AnyWritableBuf, _Hash
from typing import overload
from typing_extensions import Awaitable, TypeAlias, TypeVar
class sha256(_Hash):
"""
The current generation, modern hashing algorithm (of SHA2 series).
It is suitable for cryptographically-secure purposes. Included in the
MicroPython core and any board is recommended to provide this, unless
it has particular code size constraints.
"""
def digest(self, *args, **kwargs) -> Incomplete: ...
def update(self, *args, **kwargs) -> Incomplete: ...
@overload
def __init__(self):
"""
Create an SHA256 hasher object and optionally feed ``data`` into it.
"""
@overload
def __init__(self, data: AnyReadableBuf):
"""
Create an SHA256 hasher object and optionally feed ``data`` into it.
"""
class sha1(_Hash):
"""
A previous generation algorithm. Not recommended for new usages,
    but SHA1 is part of a number of Internet standards and existing
applications, so boards targeting network connectivity and
interoperability will try to provide this.
"""
def digest(self, *args, **kwargs) -> Incomplete: ...
def update(self, *args, **kwargs) -> Incomplete: ...
@overload
def __init__(self):
"""
Create an SHA1 hasher object and optionally feed ``data`` into it.
"""
@overload
def __init__(self, data: AnyReadableBuf):
"""
Create an SHA1 hasher object and optionally feed ``data`` into it.
"""

View File

@@ -0,0 +1,46 @@
"""
Heap queue algorithm.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/heapq.html
CPython module: :mod:`python:heapq` https://docs.python.org/3/library/heapq.html .
This module implements the
`min heap queue algorithm <https://en.wikipedia.org/wiki/Heap_%28data_structure%29>`_.
A heap queue is essentially a list that has its elements stored in such a way
that the first item of the list is always the smallest.
---
Module: 'heapq' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing import Any
from typing_extensions import Awaitable, TypeAlias, TypeVar
_T = TypeVar("_T")
def heappop(heap: list[_T], /) -> _T:
"""
Pop the first item from the ``heap``, and return it. Raise ``IndexError`` if
``heap`` is empty.
The returned item will be the smallest item in the ``heap``.
"""
...
def heappush(heap: list[_T], item: _T, /) -> None:
"""
Push the ``item`` onto the ``heap``.
"""
...
def heapify(x: list[Any], /) -> None:
"""
Convert the list ``x`` into a heap. This is an in-place operation.
"""
...
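# Illustrative use: the list is kept ordered so that heap[0] is always the smallest item.
#
#     heap: list[int] = []
#     for n in (5, 1, 3):
#         heappush(heap, n)
#     heappop(heap)    # -> 1
#     heappop(heap)    # -> 3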

File diff suppressed because it is too large

View File

@@ -0,0 +1,267 @@
"""
Mathematical functions.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/math.html
CPython module: :mod:`python:math` https://docs.python.org/3/library/math.html .
The ``math`` module provides some basic mathematical functions for
working with floating-point numbers.
*Note:* On the pyboard, floating-point numbers have 32-bit precision.
Availability: not available on WiPy. Floating point support required
for this module.
---
Module: 'math' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from typing import SupportsFloat, Tuple
from typing_extensions import Awaitable, TypeAlias, TypeVar
inf: float = float("inf")
nan: float = float("nan")
pi: float = 3.1415928
e: float = 2.7182818
tau: float = 6.2831856
def ldexp(x: SupportsFloat, exp: int, /) -> float:
"""
Return ``x * (2**exp)``.
"""
...
def lgamma(x: SupportsFloat, /) -> float:
"""
Return the natural logarithm of the gamma function of ``x``.
"""
...
def trunc(x: SupportsFloat, /) -> int:
"""
Return an integer, being ``x`` rounded towards 0.
"""
...
def isclose(*args, **kwargs) -> Incomplete: ...
def gamma(x: SupportsFloat, /) -> float:
"""
Return the gamma function of ``x``.
"""
...
def isnan(x: SupportsFloat, /) -> bool:
"""
Return ``True`` if ``x`` is not-a-number
"""
...
def isfinite(x: SupportsFloat, /) -> bool:
"""
Return ``True`` if ``x`` is finite.
"""
...
def isinf(x: SupportsFloat, /) -> bool:
"""
Return ``True`` if ``x`` is infinite.
"""
...
def sqrt(x: SupportsFloat, /) -> float:
"""
Return the square root of ``x``.
"""
...
def sinh(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic sine of ``x``.
"""
...
def log(x: SupportsFloat, base: SupportsFloat = ..., /) -> float:
"""
With one argument, return the natural logarithm of *x*.
With two arguments, return the logarithm of *x* to the given *base*.
"""
...
def tan(x: SupportsFloat, /) -> float:
"""
Return the tangent of ``x``.
"""
...
def tanh(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic tangent of ``x``.
"""
...
def log2(x: SupportsFloat, /) -> float:
"""
Return the base-2 logarithm of ``x``.
"""
...
def log10(x: SupportsFloat, /) -> float:
"""
Return the base-10 logarithm of ``x``.
"""
...
def sin(x: SupportsFloat, /) -> float:
"""
Return the sine of ``x``.
"""
...
def modf(x: SupportsFloat, /) -> Tuple[float, float]:
"""
Return a tuple of two floats, being the fractional and integral parts of
``x``. Both return values have the same sign as ``x``.
"""
...
def radians(x: SupportsFloat, /) -> float:
"""
Return degrees ``x`` converted to radians.
"""
...
def atanh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic tangent of ``x``.
"""
...
def atan2(y: SupportsFloat, x: SupportsFloat, /) -> float:
"""
Return the principal value of the inverse tangent of ``y/x``.
"""
...
def atan(x: SupportsFloat, /) -> float:
"""
Return the inverse tangent of ``x``.
"""
...
def ceil(x: SupportsFloat, /) -> int:
"""
Return an integer, being ``x`` rounded towards positive infinity.
"""
...
def copysign(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Return ``x`` with the sign of ``y``.
"""
...
def frexp(x: SupportsFloat, /) -> tuple[float, int]:
"""
Decomposes a floating-point number into its mantissa and exponent.
The returned value is the tuple ``(m, e)`` such that ``x == m * 2**e``
exactly. If ``x == 0`` then the function returns ``(0.0, 0)``, otherwise
the relation ``0.5 <= abs(m) < 1`` holds.
"""
...
def acos(x: SupportsFloat, /) -> float:
"""
Return the inverse cosine of ``x``.
"""
...
def pow(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Returns ``x`` to the power of ``y``.
"""
...
def asinh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic sine of ``x``.
"""
...
def acosh(x: SupportsFloat, /) -> float:
"""
Return the inverse hyperbolic cosine of ``x``.
"""
...
def asin(x: SupportsFloat, /) -> float:
"""
Return the inverse sine of ``x``.
"""
...
def factorial(*args, **kwargs) -> Incomplete: ...
def fabs(x: SupportsFloat, /) -> float:
"""
Return the absolute value of ``x``.
"""
...
def expm1(x: SupportsFloat, /) -> float:
"""
Return ``exp(x) - 1``.
"""
...
def floor(x: SupportsFloat, /) -> int:
"""
Return an integer, being ``x`` rounded towards negative infinity.
"""
...
def fmod(x: SupportsFloat, y: SupportsFloat, /) -> float:
"""
Return the remainder of ``x/y``.
"""
...
def cos(x: SupportsFloat, /) -> float:
"""
Return the cosine of ``x``.
"""
...
def degrees(x: SupportsFloat, /) -> float:
"""
Return radians ``x`` converted to degrees.
"""
...
def cosh(x: SupportsFloat, /) -> float:
"""
Return the hyperbolic cosine of ``x``.
"""
...
def exp(x: SupportsFloat, /) -> float:
"""
Return the exponential of ``x``.
"""
...
def erf(x: SupportsFloat, /) -> float:
"""
Return the error function of ``x``.
"""
...
def erfc(x: SupportsFloat, /) -> float:
"""
Return the complementary error function of ``x``.
"""
...
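# Illustrative frexp/ldexp round-trip, matching the relation documented above
# (x == m * 2**e with 0.5 <= abs(m) < 1); values shown are exact for these inputs:
#
#     m, e = frexp(12.0)    # -> (0.75, 4)
#     ldexp(m, e)           # -> 12.0
#     modf(2.5)             # -> (0.5, 2.0)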

View File

@@ -0,0 +1,350 @@
"""
Access and control MicroPython internals.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/micropython.html
---
Module: 'micropython' on micropython-v1.26.0-rp2-RPI_PICO
"""
# MCU: {'mpy': 'v6.3', 'build': '', 'ver': '1.26.0', 'arch': 'armv6m', 'version': '1.26.0', 'port': 'rp2', 'board': 'RPI_PICO', 'family': 'micropython', 'board_id': 'RPI_PICO', 'variant': '', 'cpu': 'RP2040'}
# Stubber: v1.26.0
from __future__ import annotations
from _typeshed import Incomplete
from _mpy_shed import mp_available
from typing import Any, Callable, Optional, Tuple, overload
from typing_extensions import Awaitable, ParamSpec, TypeAlias, TypeVar
_T = TypeVar("_T")
_F = TypeVar("_F", bound=Callable[..., Any])
Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple)
_Param = ParamSpec("_Param")
_Ret = TypeVar("_Ret")
@overload
def opt_level() -> int:
"""
If *level* is given then this function sets the optimisation level for subsequent
compilation of scripts, and returns ``None``. Otherwise it returns the current
optimisation level.
The optimisation level controls the following compilation features:
- Assertions: at level 0 assertion statements are enabled and compiled into the
bytecode; at levels 1 and higher assertions are not compiled.
- Built-in ``__debug__`` variable: at level 0 this variable expands to ``True``;
at levels 1 and higher it expands to ``False``.
- Source-code line numbers: at levels 0, 1 and 2 source-code line number are
stored along with the bytecode so that exceptions can report the line number
they occurred at; at levels 3 and higher line numbers are not stored.
The default optimisation level is usually level 0.
"""
@overload
def opt_level(level: int, /) -> None:
"""
If *level* is given then this function sets the optimisation level for subsequent
compilation of scripts, and returns ``None``. Otherwise it returns the current
optimisation level.
The optimisation level controls the following compilation features:
- Assertions: at level 0 assertion statements are enabled and compiled into the
bytecode; at levels 1 and higher assertions are not compiled.
- Built-in ``__debug__`` variable: at level 0 this variable expands to ``True``;
at levels 1 and higher it expands to ``False``.
- Source-code line numbers: at levels 0, 1 and 2 source-code line number are
stored along with the bytecode so that exceptions can report the line number
they occurred at; at levels 3 and higher line numbers are not stored.
The default optimisation level is usually level 0.
"""
@overload
def mem_info() -> None:
"""
Print information about currently used memory. If the *verbose* argument
is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the amount of stack and heap used. In verbose mode it prints out
the entire heap indicating which blocks are used and which are free.
"""
@overload
def mem_info(verbose: Any, /) -> None:
"""
Print information about currently used memory. If the *verbose* argument
is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the amount of stack and heap used. In verbose mode it prints out
the entire heap indicating which blocks are used and which are free.
"""
def kbd_intr(chr: int) -> None:
"""
Set the character that will raise a `KeyboardInterrupt` exception. By
default this is set to 3 during script execution, corresponding to Ctrl-C.
Passing -1 to this function will disable capture of Ctrl-C, and passing 3
will restore it.
This function can be used to prevent the capturing of Ctrl-C on the
incoming stream of characters that is usually used for the REPL, in case
that stream is used for other purposes.
"""
...
@overload
def qstr_info() -> None:
"""
Print information about currently interned strings. If the *verbose*
argument is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the number of interned strings and the amount of RAM they use. In
verbose mode it prints out the names of all RAM-interned strings.
"""
@overload
def qstr_info(verbose: bool, /) -> None:
"""
Print information about currently interned strings. If the *verbose*
argument is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the number of interned strings and the amount of RAM they use. In
verbose mode it prints out the names of all RAM-interned strings.
"""
def schedule(func: Callable[[_T], None], arg: _T, /) -> None:
"""
Schedule the function *func* to be executed "very soon". The function
is passed the value *arg* as its single argument. "Very soon" means that
the MicroPython runtime will do its best to execute the function at the
earliest possible time, given that it is also trying to be efficient, and
that the following conditions hold:
- A scheduled function will never preempt another scheduled function.
- Scheduled functions are always executed "between opcodes" which means
that all fundamental Python operations (such as appending to a list)
are guaranteed to be atomic.
- A given port may define "critical regions" within which scheduled
functions will never be executed. Functions may be scheduled within
a critical region but they will not be executed until that region
is exited. An example of a critical region is a preempting interrupt
handler (an IRQ).
A use for this function is to schedule a callback from a preempting IRQ.
Such an IRQ puts restrictions on the code that runs in the IRQ (for example
the heap may be locked) and scheduling a function to call later will lift
those restrictions.
On multi-threaded ports, the scheduled function's behaviour depends on
whether the Global Interpreter Lock (GIL) is enabled for the specific port:
- If GIL is enabled, the function can preempt any thread and run in its
context.
- If GIL is disabled, the function will only preempt the main thread and run
in its context.
Note: If `schedule()` is called from a preempting IRQ, when memory
allocation is not allowed and the callback to be passed to `schedule()` is
a bound method, passing this directly will fail. This is because creating a
reference to a bound method causes memory allocation. A solution is to
create a reference to the method in the class constructor and to pass that
    reference to `schedule()`. This is discussed in detail in the
    :ref:`reference documentation <isr_rules>` under "Creation of Python
objects".
There is a finite queue to hold the scheduled functions and `schedule()`
will raise a `RuntimeError` if the queue is full.
"""
...
def stack_use() -> int:
"""
Return an integer representing the current amount of stack that is being
used. The absolute value of this is not particularly useful, rather it
should be used to compute differences in stack usage at different points.
"""
...
def heap_unlock() -> int:
"""
Lock or unlock the heap. When locked no memory allocation can occur and a
`MemoryError` will be raised if any heap allocation is attempted.
`heap_locked()` returns a true value if the heap is currently locked.
These functions can be nested, ie `heap_lock()` can be called multiple times
in a row and the lock-depth will increase, and then `heap_unlock()` must be
called the same number of times to make the heap available again.
Both `heap_unlock()` and `heap_locked()` return the current lock depth
(after unlocking for the former) as a non-negative integer, with 0 meaning
the heap is not locked.
If the REPL becomes active with the heap locked then it will be forcefully
unlocked.
    Note: `heap_locked()` is not enabled on most ports by default;
    it requires ``MICROPY_PY_MICROPYTHON_HEAP_LOCKED``.
"""
def const(expr: Const_T, /) -> Const_T:
"""
Used to declare that the expression is a constant so that the compiler can
optimise it. The use of this function should be as follows::
from micropython import const
CONST_X = const(123)
CONST_Y = const(2 * CONST_X + 1)
Constants declared this way are still accessible as global variables from
outside the module they are declared in. On the other hand, if a constant
begins with an underscore then it is hidden, it is not available as a global
variable, and does not take up any memory during execution.
This `const` function is recognised directly by the MicroPython parser and is
provided as part of the :mod:`micropython` module mainly so that scripts can be
written which run under both CPython and MicroPython, by following the above
pattern.
"""
...
def heap_lock() -> int:
"""
Lock or unlock the heap. When locked no memory allocation can occur and a
`MemoryError` will be raised if any heap allocation is attempted.
`heap_locked()` returns a true value if the heap is currently locked.
These functions can be nested, ie `heap_lock()` can be called multiple times
in a row and the lock-depth will increase, and then `heap_unlock()` must be
called the same number of times to make the heap available again.
Both `heap_unlock()` and `heap_locked()` return the current lock depth
(after unlocking for the former) as a non-negative integer, with 0 meaning
the heap is not locked.
If the REPL becomes active with the heap locked then it will be forcefully
unlocked.
    Note: `heap_locked()` is not enabled on most ports by default;
    it requires ``MICROPY_PY_MICROPYTHON_HEAP_LOCKED``.
"""
def alloc_emergency_exception_buf(size: int, /) -> None:
"""
Allocate *size* bytes of RAM for the emergency exception buffer (a good
size is around 100 bytes). The buffer is used to create exceptions in cases
when normal RAM allocation would fail (eg within an interrupt handler) and
therefore give useful traceback information in these situations.
A good way to use this function is to put it at the start of your main script
(eg ``boot.py`` or ``main.py``) and then the emergency exception buffer will be active
for all the code following it.
"""
...
class RingIO:
def readinto(self, buf, nbytes: Optional[Any] = None) -> int:
"""
Read available bytes into the provided ``buf``. If ``nbytes`` is
specified then read at most that many bytes. Otherwise, read at
most ``len(buf)`` bytes.
Return value: Integer count of the number of bytes read into ``buf``.
"""
...
def write(self, buf) -> int:
"""
Non-blocking write of bytes from ``buf`` into the ringbuffer, limited
by the available space in the ringbuffer.
Return value: Integer count of bytes written.
"""
...
def readline(self, nbytes: Optional[Any] = None) -> bytes:
"""
Read a line, ending in a newline character or return if one exists in
the buffer, else return available bytes in buffer. If ``nbytes`` is
specified then read at most that many bytes.
Return value: a bytes object containing the line read.
"""
...
def any(self) -> int:
"""
Returns an integer counting the number of characters that can be read.
"""
...
def read(self, nbytes: Optional[Any] = None) -> bytes:
"""
Read available characters. This is a non-blocking function. If ``nbytes``
is specified then read at most that many bytes, otherwise read as much
data as possible.
Return value: a bytes object containing the bytes read. Will be
zero-length bytes object if no data is available.
"""
...
def close(self) -> Incomplete:
"""
No-op provided as part of standard `stream` interface. Has no effect
on data in the ringbuffer.
"""
...
def __init__(self, size) -> None: ...
# decorators
@mp_available() # force merge
def viper(_func: Callable[_Param, _Ret], /) -> Callable[_Param, _Ret]:
"""
The Viper code emitter is not fully compliant. It supports special Viper native data types in pursuit of performance.
Integer processing is non-compliant because it uses machine words: arithmetic on 32 bit hardware is performed modulo 2**32.
Like the Native emitter Viper produces machine instructions but further optimisations are performed, substantially increasing
performance especially for integer arithmetic and bit manipulations.
See: https://docs.micropython.org/en/latest/reference/speed_python.html?highlight=viper#the-native-code-emitter
"""
...
@mp_available() # force merge
def native(_func: Callable[_Param, _Ret], /) -> Callable[_Param, _Ret]:
"""
This causes the MicroPython compiler to emit native CPU opcodes rather than bytecode.
It covers the bulk of the MicroPython functionality, so most functions will require no adaptation.
See: https://docs.micropython.org/en/latest/reference/speed_python.html#the-native-code-emitter
"""
...
@mp_available(macro="MICROPY_EMIT_INLINE_THUMB") # force merge
def asm_thumb(_func: Callable[_Param, _Ret], /) -> Callable[_Param, _Ret]:
"""
This decorator is used to mark a function as containing inline assembler code.
    The assembler code is written in a subset of the ARM Thumb-2 instruction set and is executed on the target CPU.
Availability: Only on specific boards where MICROPY_EMIT_INLINE_THUMB is defined.
See: https://docs.micropython.org/en/latest/reference/asm_thumb2_index.html
"""
...
@mp_available(port="esp8266") # force merge
def asm_xtensa(_func: Callable[_Param, _Ret], /) -> Callable[_Param, _Ret]:
"""
This decorator is used to mark a function as containing inline assembler code for the esp8266.
The assembler code is written in the Xtensa instruction set, and is executed on the target CPU.
Availability: Only on esp8266 boards.
"""
...
# See :
# - https://github.com/orgs/micropython/discussions/12965
# - https://github.com/micropython/micropython/pull/16731
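# Illustrative (hypothetical) use of the emitter decorators above, kept as a comment so
# the stub stays declaration-only; availability depends on the port/board as noted.
#
#   import micropython
#
#   @micropython.native
#   def add(a, b):
#       return a + b
#
#   @micropython.viper
#   def count_bits(x: int) -> int:
#       n = 0
#       while x:
#           n += x & 1
#           x >>= 1
#       return n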

View File

@@ -0,0 +1,22 @@
MIT License
Copyright (c) 2022 Jos Verlinde
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,64 @@
Metadata-Version: 2.3
Name: micropython-rp2-stubs
Version: 1.26.0.post1
Summary: MicroPython stubs
License: MIT
Author: Jos Verlinde
Author-email: josverl@users.noreply.github.com
Classifier: Typing :: Stubs Only
Classifier: Development Status :: 5 - Production/Stable
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: Implementation :: MicroPython
Classifier: Operating System :: OS Independent
Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
Classifier: Topic :: Software Development :: Documentation
Classifier: Topic :: Software Development :: Embedded Systems
Classifier: Topic :: Software Development :: Testing
Classifier: Natural Language :: English
Requires-Dist: micropython-stdlib-stubs (>=1.26.0,<1.27.0)
Project-URL: Documentation, https://micropython-stubs.readthedocs.io/
Project-URL: Homepage, https://github.com/josverl/micropython-stubs#micropython-stubs
Project-URL: Repository, https://github.com/josverl/micropython-stubs
Description-Content-Type: text/markdown
# micropython-rp2-stubs
This is a stub-only package for MicroPython.
It is intended to be installed in a project's virtual environment to allow static type checkers and intellisense features to be used while writing MicroPython code.
The version of this package is aligned with the version of the MicroPython firmware.
- Major, Minor and Patch levels are aligned to the same version as the firmware.
- The post release level is used to publish new releases of the stubs.
For `MicroPython 1.17` the stubs are published as `1.17.post1` ... `1.17.post2`;
for `MicroPython 1.18` the stubs are published as `1.18.post1` ... `1.18.post2`.
To install the latest stubs:
`pip install -I micropython-<port>-stubs` where port is the port of the MicroPython firmware.
To install the stubs for an older version, such as MicroPython 1.17:
`pip install micropython-stm32-stubs==1.17.*` which will install the last post release of the stubs for MicroPython 1.17.
As the creation of the stubs, and the merging of the different types, is still going through improvements, the stub packages are marked as Beta.
To upgrade stubs to the latest stubs for a specific version use `pip install micropython-stm32-stubs==1.17.* --upgrade`
If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions)
For an overview of Micropython Stubs please see: https://micropython-stubs.readthedocs.io/en/main/
* List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html
Included stubs:
* Merged stubs from `stubs/micropython-v1_26_0-rp2-RPI_PICO-merged`
* Frozen stubs from `stubs/micropython-v1_26_0-frozen/rp2/GENERIC`
* Core stubs from `stubs/micropython-core`
origin | Family | Port | Board | Version
-------|--------|------|-------|--------

View File

@@ -0,0 +1,58 @@
__builtins__.pyi,sha256=P0dwpsSqizQRPmaI6J275kc7G35m38YjafDPz4SEdKI,1097
_boot.pyi,sha256=0e1lrk87JxZMowULLiOAz7leOmzBkvFCabOD4i-djzY,70
_boot_fat.pyi,sha256=5SN-IunMd7ESRkWUm4XH1DLoX8F2_R0gm-p_dvC4i-4,54
_onewire.pyi,sha256=Aoqj9t5Zo48w2nwqKFF3bKUjvDFKGM0TBcRhcMDoMQE,663
_thread.pyi,sha256=qruh97SKzvBuIMEP6g-pEOwEw_X18yhcNpnvmQMznCw,1318
binascii.pyi,sha256=Js4mvTBtShAqzpkr53R_OS94gZnxyr5EvSavOiLZScg,2121
cmath.pyi,sha256=W7LjCb4tdmlZVxtU3_E49ae_CLumfotRT36AItkySMA,2110
cryptolib.pyi,sha256=fyYS0lSiMjhSQCN6FEHB10llwxyrghxvk8LucmF1tto,6734
deflate.pyi,sha256=4GYBP3Ce-Trro2pMGFwNTeBM_sCux681xE3J1fGVYKA,3746
dht.pyi,sha256=zamvhZo46pXwloiPKKlldLlMYmb7waNyZE1aeUnz-vA,344
ds18x20.pyi,sha256=aHK7R9hREAFdOgPBd5dx8-MPBORBUvwHWNDDSVULl-Q,423
errno.pyi,sha256=8036ME2KKlPQhWRvp-GYue4P7vYrMHKIM4UTemfgSkA,1448
framebuf.pyi,sha256=DWoxiTKnEjO0_0nrK0SuLOvFSrnNdLSsAoQiqOoaBDw,9265
gc.pyi,sha256=Dg_YTQxCHCGzNWFyBD0LtVvz4q1EpDpD3Khvuxxz70g,4279
hashlib.pyi,sha256=JASAsPp2aT0JvqOz9eE44AulQ63PKKVgPsx4WPPJDKM,3665
heapq.pyi,sha256=zVdjTbcM-IpP6FXBhDPhlbxb3Q0yfbu6-lkGvOxoBmk,1433
machine.pyi,sha256=3ZcwR0rvop3NAjhBxDMfsDb1w95J6oIGXfozOoDGN2g,129722
math.pyi,sha256=u6AllM7vjnJdLS45X5xGJHfeH0FJXS6ypCJznS5defQ,6041
micropython.pyi,sha256=oyWt_RtD_SfZfSgwUmrxJobn_VTGjHU3ZqmvedPh7FM,14996
micropython_rp2_stubs-1.26.0.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
micropython_rp2_stubs-1.26.0.post1.dist-info/LICENSE.md,sha256=EerY3Evf4ZqwZBZMGIokOHR70txtsAlS_GGHuxmxUJY,1092
micropython_rp2_stubs-1.26.0.post1.dist-info/METADATA,sha256=TgVmz4GDRvkH6jpyzXhgRQ98fOCpJc9S6uHnpdTNbhs,3033
micropython_rp2_stubs-1.26.0.post1.dist-info/RECORD,,
micropython_rp2_stubs-1.26.0.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
micropython_rp2_stubs-1.26.0.post1.dist-info/WHEEL,sha256=5druYqcII7zHXzX7qmN0TH895Jg3yuTtfjMgJIVBrKE,92
neopixel.pyi,sha256=FdRUbjJVKfF98ojrMeBrFsw6Sm9AY6ES9HM6NmYvxa0,2607
onewire.pyi,sha256=UboaaLJILaciyu73yPK4sF7B-humChu0LOWtEUM6ktk,619
platform.pyi,sha256=2BjgGrD2-65FgM-_ypN2pPqXkTM8EGnmBgP9WATwUls,1829
random.pyi,sha256=X040iEdWm4DM1bGhRYrNz4Pxmkn3pjPmYCF1OfCEIvs,4030
rp2/__init__.pyi,sha256=gHedoolqLnlAUpaAjYFGb-dz5eZ0foeSwPwMs2PlA7U,42977
rp2/asm_pio.pyi,sha256=-JsGzHyzi8dsRP3QNHyvm-6at6VUJzwEDj-Yaq-Z0mc,23085
select.pyi,sha256=XT6lIWsV6LjvCWAx2sSUQcNyTokFlWlzj-385eZIV00,4695
time.pyi,sha256=MgdDX9YL8fosE8AKbRd8mMrgsWvBt1sk6h9wtZeCQz0,14007
uarray.pyi,sha256=_vDoA-BKjQMQIwjaCAV1nll5vAEL2T-xgrQiDm5xVdE,73
uasyncio.pyi,sha256=jeB0u_5xMQ2V5xyJ6YzCNrCdjy4_O_28uRd3qCRbuhI,77
ubinascii.pyi,sha256=TJgiLXvErAY6Md4T6yyuBNMJZ57o0IqBbmwrKi42DVA,79
ubluetooth.pyi,sha256=qx8fxO2JRh4LcUimgzbDAG-O2g1N-U_WOPz36sE1ZRs,81
ucollections.pyi,sha256=JlFgl88kORBqQ_XreUQVVUmI2jwKnOgOCIl-GhuBv6Q,460
ucryptolib.pyi,sha256=ZFBml3gtn0iraWDms2FPcrZPjGWXJCLgxPR6zR1ey80,598
uctypes.pyi,sha256=7MQ1ku0y4yWQ316kBiExgJl8gHoaw7L-zTTPWMBqsbI,3803
uerrno.pyi,sha256=O8b_ZlyhYouSdtOm7p53FTymfarjAKAupHOx1brBq3s,73
uhashlib.pyi,sha256=4l0OybeRxckunXbeFW748giv6giTcOkeZg5ZLFX4s7Q,782
uheapq.pyi,sha256=vzpZiyC3LfaTphjTfKWTATjRoD7XVGUy3kr2C242v6M,558
uio.pyi,sha256=dWmUZ-iYMZu4KKOqspZ5ZXj3jv9K6dCOojK7_nlihUk,67
ujson.pyi,sha256=pggiInEKs9lBzun6IvWh8m9iQr6cadxBUYgyp8dugQk,71
umachine.pyi,sha256=NpuoOYQYmWHwFuUCk3jAvPRyRg4S6WqoR1ddVKsLre0,77
uos.pyi,sha256=00KOdXyvwpFYd_DhOpOkDC6qI0-F9tCOxvyIo00dsL8,67
uplatform.pyi,sha256=mKYGUtsWRzkXT-nhG5XPR32jJ1IbJNSMmYodaAvZaJc,79
urandom.pyi,sha256=BAm-lBWsPMYYFukHYb1-CQElDGconl4n51MWoQmGbO4,755
ure.pyi,sha256=MIvrONKD5LKa9fwXMllrbDm_lzkqGWLwpreoCHMJBSc,596
uselect.pyi,sha256=i3wd5YYMhhhN0oROk4fMPYkwFPjunp8C09_3dPLQdVM,75
usocket.pyi,sha256=ih8oT6AEAf9mslyw7AksnAmkUWbT4uyhV4Y_HFLPE6Q,75
ussl.pyi,sha256=4gPHz7LcHDQS4oA3uFgH0yfeoTUslSk72qgqDqSN9BA,69
ustruct.pyi,sha256=szRH88vBKutJsEKsfkzRYn8nYQP4j8POjBMHPbIedws,75
usys.pyi,sha256=HRfWC305rZ4OKTBu_1UmPpDNNS1uKMy8DuFrfHV-4G4,69
utime.pyi,sha256=bf-Oee8krzlA5AkXG_hrNgkijrmD9eNXEpUWLjt8o6s,71
uzlib.pyi,sha256=TaBEcTT07wPvtMX6Vtkz2KLnfGSqmnq6lVPtp9Wh3yc,71
vfs.pyi,sha256=vrMyC_Y_gt3mvxOwQuH2o0sI4Ccu5Kme3ydPTsWzi7s,10897

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: poetry-core 2.1.3
Root-Is-Purelib: true
Tag: py2.py3-none-any

View File

@@ -0,0 +1,86 @@
Metadata-Version: 2.4
Name: micropython-stdlib-stubs
Version: 1.26.0.post3
Summary: Micropython stdlib is a reduced and augmented copy of typeshed's stdlib for use by MicroPython stub packages
Project-URL: homepage, https://github.com/josverl/micropython-stubs#micropython-stubs
Project-URL: documentation, https://micropython-stubs.readthedocs.io/
Project-URL: repository, https://github.com/josverl/micropython-stubs
Author-email: Jos Verlinde <josverl@users.noreply.github.com>
License-Expression: MIT
License-File: LICENSE.md
License-File: LICENSE_typeshed
Classifier: Development Status :: 5 - Production/Stable
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: Implementation :: MicroPython
Classifier: Topic :: Software Development :: Build Tools
Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
Classifier: Typing :: Stubs Only
Requires-Python: >=3.7
Description-Content-Type: text/markdown
# micropython-stdlib-stubs
A limited-size copy of typeshed's stdlib directory.
https://github.com/python/typeshed/tree/main/stdlib
This is used as a dependency in the micropython-*-stub packages to allow overriding of some of the stdlib modules with MicroPython-specific implementations.
MicroPython specific updates to:
- collections
If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions)
For an overview of Micropython Stubs please see: https://micropython-stubs.readthedocs.io/en/main/
* List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html
## Building
This is a short description of the steps taken to create or update the stubs for the micropython-stdlib-stubs distribution.
This package is built using `uv build` and published using `uv publish`.
## There are two possible reasons to rebuild
1. Rebuild in order to update from the reference-stubs or document-stubs. This will use the same typeshed stubs, and infuse or enrich them with new information from MicroPython.
2. Rebuild in order to update to a newer version of the typeshed repo. For this you will need to manually advance the typeshed repo to a newer version (perhaps the newest version) and then rerun the update script. Due to changes in the base typeshed stubs, you may need to update the update script to accommodate them.
*Steps*
- clone the typeshed repository
```bash
cd repos
git clone https://github.com/python/typeshed.git
cd typeshed
git checkout <commit-hash>
```
- update the version in `pyproject.toml` to the new version (`.postnn`)
- from the publish/micropython-stdlib-stubs directory
- run `python build.py`
- update and publish the micropython-stdlib-stubs package
```bash
uv publish
```
- commit the changes to the micropython-stdlib-stubs repository
## asyncio notes
- No `_asyncio.pyi` stubs, to avoid conflicts with stdlib re-use.
- Generators are not values, but are always callables or coroutines, so rewrite
  `open_connection: Generator ## = <generator>`
  to
  `async def open_connection() -> Callable[..., Awaitable[Tuple[StreamReader, StreamWriter]]]`
  or similar (see the sketch below).
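For illustration, the rewritten entry could be sketched as below. This is an assumption-laden sketch, not the exact stub that ships; the imports are only there to make it self-contained.

```python
# Sketch of the rewrite described above (illustrative only; the shipped stub may differ).
from typing import Awaitable, Callable, Tuple

from asyncio.streams import StreamReader, StreamWriter

# before (machine-generated):
#   open_connection: Generator  ## = <generator>

# after (hand-written coroutine stub):
async def open_connection() -> Callable[..., Awaitable[Tuple[StreamReader, StreamWriter]]]: ...
```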
## Publish to PyPI
Publish using `uv publish`.

View File

@@ -0,0 +1,83 @@
_mpy_shed/IRQs.pyi,sha256=hCqQDClcgx77uDiho57KFrS9HYVvD0zh0Smcs09KIgk,808
_mpy_shed/__init__.pyi,sha256=821KoaqheWqvdKcrZjT9wX9USu8usFvptCJkZbmZsW4,3753
_mpy_shed/__pycache__/mp_implementation.cpython-312.pyc,,
_mpy_shed/_collections_abc.pyi,sha256=qthswhc_-15h8F74Fb9Bu-aKXcgwdiKNaikkPnNeMvs,3241
_mpy_shed/blockdevice.pyi,sha256=oT5r8wKFBQcds6uGt4V6voRZnYUg2HaDXEbpt8UwWXc,10065
_mpy_shed/buffer_mp.pyi,sha256=P7SGDfT8i0St5mhj9HwH_6J6WoP2Hq9Zhsh4k6Pf2Bs,450
_mpy_shed/collections/__init__.pyi,sha256=DKGew2BelWHgR31vYu4UCMpGOPF1DJpoZIXcvJN6th4,24476
_mpy_shed/collections/abc.pyi,sha256=zgovA8n11pYi3KyJJJeHveG0G3KpnvsljqQ5QirtDpk,85
_mpy_shed/io_modes.pyi,sha256=JiP86MvsJpGFCIIxhkYvcI8xvgdKKPoA_nMKo8dDkIc,1762
_mpy_shed/io_mp.pyi,sha256=paBaW8dgurf4SF6ZAT4O8yzc_XxI9u6WIwYL5SHIrUk,1954
_mpy_shed/mp_available.pyi,sha256=JDAwXLHVWG4eaCzhaL-MuG1QUJWud2O68WhRlAwYOiM,868
_mpy_shed/mp_implementation.py,sha256=Lvx6d7p16eVdIhUCEPISyBQo1MmoRsdyPXKa351TZJs,1057
_mpy_shed/neopixelbase.pyi,sha256=fHfGkJCrn1JoWu2_sgSIHu7igdqfLxHjOc15lUaBFeQ,621
_mpy_shed/pathlike.pyi,sha256=kW6M9m1ekNlohS2GxrWUHfu1WOJcr4yAACueFKXgUhg,684
_mpy_shed/subscriptable.pyi,sha256=nKVNhNjinrzFYaR0DqE7cEyN3jqJsvhxTnswzRGmdAY,644
_mpy_shed/time_mp.pyi,sha256=aHVWS8uJJIHEWqawx2IwT5zUNauv9b4LRdi1Sph5s9I,658
micropython_stdlib_stubs-1.26.0.post3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
micropython_stdlib_stubs-1.26.0.post3.dist-info/METADATA,sha256=FxCF4g1NHo5lnX-LG2Z3W8tsOI-1G8FDPAm9JUVly5U,3518
micropython_stdlib_stubs-1.26.0.post3.dist-info/RECORD,,
micropython_stdlib_stubs-1.26.0.post3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
micropython_stdlib_stubs-1.26.0.post3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
micropython_stdlib_stubs-1.26.0.post3.dist-info/licenses/LICENSE.md,sha256=XnIlPftszZeoPSWf1jwR9a1w2zp3zOL_-oC0qRi-gbE,13067
micropython_stdlib_stubs-1.26.0.post3.dist-info/licenses/LICENSE_typeshed,sha256=E8ceCWKDbYhQdivaD2I0_yEwNIcfpmtsQpYlSdc1F6Q,12894
stdlib/VERSIONS,sha256=mM3ASvFIjLqG4GJyqeZt6hRAEr8B5reRT7dCmtbYYDw,5995
stdlib/__future__.pyi,sha256=qIwWDmjaw3XCiulKYoKBQB_eJjLxweesUKwBdpkgQkU,915
stdlib/_ast.pyi,sha256=Hp9iS4qcJfo4KlueBTaYkR9IfdelifXU3bBhnAe_tmw,3466
stdlib/_codecs.pyi,sha256=dWiEtxcI9QnZYo0tGkMS14ksIK8Nn1jQvoHznsDRf20,6919
stdlib/_collections_abc.pyi,sha256=tAxXudfHJqSZco-VYEA3KdeHisUgq5iGleLPkylU0-8,2778
stdlib/_decimal.pyi,sha256=CGJcsPJpGBn_uIAse-6mJCtssMMcHsWiJGTnZdTVOI8,2105
stdlib/_typeshed/README.md,sha256=upGLmqNVRlXIE70i0vGA969dc26GVqU9Gs5cHEzb8Ys,1043
stdlib/_typeshed/__init__.pyi,sha256=W67NAhSV3NF1R4na73c966olOqM3NHheEu3Z-8PlRO0,12563
stdlib/_typeshed/dbapi.pyi,sha256=DbFvZC7aeSFuw_hopshe-nz6OL_btPB06zIoJ8O-9tA,1636
stdlib/_typeshed/importlib.pyi,sha256=iSR1SQrIgH39dZwu1o0M0qk8ZsxRUkn4DtG2_K5tO4o,727
stdlib/_typeshed/wsgi.pyi,sha256=6sb45JIA9DuSd1hYwxt2418TU6E4tVFiIfflHKMnpnE,1614
stdlib/_typeshed/xml.pyi,sha256=W4c9PcHw737FUoezcPAkfRuoMB--7Up7uKlZ0ShNIG0,499
stdlib/abc.pyi,sha256=oli4JypsePdvKt1xAB0sqDFbX1aUYddNRzj2BP65M-w,1987
stdlib/array.pyi,sha256=CqeQucdU-czxtV4634Q-sUeQ_relrCnXQxJ86lAMGZg,24658
stdlib/asyncio/__init__.pyi,sha256=aJXL2LflxMaFW91K3mH1L4RakIV4bR_ZuHMnDn51eRg,1337
stdlib/asyncio/base_events.pyi,sha256=av8dDqKw4UttfGfla1x780I2Q6kVrNcQ59DlHea5a0k,19986
stdlib/asyncio/base_futures.pyi,sha256=64lMK_8YEQQoxRnN-2OAQzKnEx9F9VVQ4GKIQKeqFxE,749
stdlib/asyncio/base_tasks.pyi,sha256=PYv3qwMz2WIqDs3GGdLTaJfiJBTuUwIK0PUEKHIl9Uc,413
stdlib/asyncio/constants.pyi,sha256=aQWt89UfXp0rZM29OQDAGGlGzieOr6dAQ6nlSS5gjAU,576
stdlib/asyncio/coroutines.pyi,sha256=ndCXCDSwGk4ZLKmlNCW59cbfxwc2cExSYyrkpV63TDM,1062
stdlib/asyncio/events.pyi,sha256=BPITbGyq8A1SbwxsdTkLYzANtPOQ95R5qVK5ExptrAc,25189
stdlib/asyncio/exceptions.pyi,sha256=sSiIocmo4Zgxt7_n-7ms-FthtJTCmEC4dbNuN0R11Pc,1142
stdlib/asyncio/format_helpers.pyi,sha256=ur-vKOrzAmO4JvC4YmbVuQhTDi8giSx0ym7_Uu91nxw,1334
stdlib/asyncio/futures.pyi,sha256=kSqape-NQOeANur5Z9e6lJMNj4rTgDtsRv5C8C4skLU,653
stdlib/asyncio/locks.pyi,sha256=2a1PhjkhMpTRmCS49kkF0r7YGwocFL6Gio3s8oGGMBo,4391
stdlib/asyncio/log.pyi,sha256=--UJmDmbuqm1EdrcBW5c94k3pzoNwlZKjsqn-ckSpPA,42
stdlib/asyncio/micropython.pyi,sha256=yKAHRdTaLJXYCPPJTWf6r9k7YiW11heX01YIDzW4nhg,1153
stdlib/asyncio/mixins.pyi,sha256=M8E77-G6AYPE2HyZEua5Rht1DP5-URJ2rBDPSmkiclA,224
stdlib/asyncio/proactor_events.pyi,sha256=zQnjKl-JdZqz6p4L-VySG6rDjb0_DF0r87twIdaWsvM,2596
stdlib/asyncio/protocols.pyi,sha256=3ooDCGHhxxYPcM5o20stbqOPNd-RBbiIDNk4ungvJqU,1665
stdlib/asyncio/queues.pyi,sha256=M71sCrWslDdIlWEZQ4Uk9p2TbZmv-GHWAJtGLFTYA_o,1918
stdlib/asyncio/readme.md,sha256=CmUrpswtK8eLayHzofxLEDDwPYryiyVlK3G2t2y9C6o,357
stdlib/asyncio/runners.pyi,sha256=DO4xjsc9DNqIqnNg_HdrBbweDWfZrHguZH8DDZKB9Mo,1205
stdlib/asyncio/selector_events.pyi,sha256=-40IJS-J9MsqcwKb9SkSsO-5-679O_7ocPwOZQZ44yA,231
stdlib/asyncio/sslproto.pyi,sha256=rqtzXumHJODjJt0YsYzA9BVk_16lEKFydp4Lo_EOFtE,6615
stdlib/asyncio/staggered.pyi,sha256=Qwgygm1Wd5ydD1Q9Iwu1lCq5uHRl0q_p5wca_HD7ask,351
stdlib/asyncio/streams.pyi,sha256=x-l1vbdqy8bPq6i0B8X4mzF4jfkkdIl-UHopz_hrCVM,6785
stdlib/asyncio/tasks.pyi,sha256=tEtbuaBujf8PZN4GYCIl2ZyOBr-QeQ22I5KEQJa1Fdo,17702
stdlib/asyncio/threads.pyi,sha256=MEYiLgK_Q1coLUEaPtNQdmOWOMnGaofaSV_vshKvyQE,275
stdlib/asyncio/timeouts.pyi,sha256=5LCrJFI5pNOFfQWPPTbljcMrAIbvYG8AzuTf5-QAt1g,672
stdlib/asyncio/transports.pyi,sha256=eFNxnqiwNWvstKzb5aMGbIvWxaIAdNX5msIOWqZfxvo,2087
stdlib/asyncio/trsock.pyi,sha256=hbdvHTSOHTInMcYxuj32WkQ2nbvcDdmcXooQxjvddYg,4714
stdlib/builtins.pyi,sha256=TR7v_4KLL5OhAzD8dmYI0y7gY-IJDhPVEIvDAXqsvkA,91758
stdlib/collections/__init__.pyi,sha256=a6HGdIW4RrokNu1TGz9U52B20u6jxcMpu_bxC0OeaxA,27912
stdlib/collections/abc.pyi,sha256=kBiZAN0VPf8qpkInbjqKZRRe0PXrZ7jxNmCGs4o5UOc,79
stdlib/enum.pyi,sha256=x5vIwgtEe_VLYdCC30GwurkMHOBzrNF31QXaUrEIi5w,12074
stdlib/io.pyi,sha256=cprNnue_TSUsS9-SSKYTjX_1y2rhTp_3Bp1LwxMWQj8,46125
stdlib/json/__init__.pyi,sha256=RSCtGbMWkNbZdsQ2DoXRW1p8SesGlpfz3e1FwlOtfHc,10005
stdlib/os/__init__.pyi,sha256=YWq8bkIvatKsakZi8NgbqpkRgUrwk873pPPhYFKZDIU,58598
stdlib/re.pyi,sha256=M25Rpd8KC_5Aw5heVggCVmbxuxAb28c3fu0Pjt8cssM,11784
stdlib/sre_compile.pyi,sha256=xhzSJueTiaUdn2OeQbQ7xPqDEWEitDfRA9n76sIUm94,304
stdlib/sre_constants.pyi,sha256=Z7OUqL_OUe75kpGXw0tHCG4CN62AoY-5FyrSlXFuQOs,3794
stdlib/sre_parse.pyi,sha256=9PT58-Q2oMDqtejzbWG2D5-UiBo8mQnptiAevjW7ZyQ,3790
stdlib/ssl.pyi,sha256=lfADY9On5yncg5VvRo604wAmJM8mA9LMSndAlDcMayw,25657
stdlib/struct.pyi,sha256=Pvce2bp43lUS1vG9xCV4EypjJqH6_WokPudxrTSJrIA,5579
stdlib/sys/__init__.pyi,sha256=4ntB01895ZnSYU4pQymzSrEpizm5mp9-rrDpACQ-U50,28576
stdlib/types.pyi,sha256=H_o9m3Le36mQWUCTtW061R5TMFP3EnqLSvupvWJ6jmg,21898
stdlib/typing.pyi,sha256=DE2DSoBLttgjFWp-27b8krO5RNkxZ3xUwMTwRRtHWtY,39273
stdlib/typing_extensions.pyi,sha256=AQILGFIJ8YG-c7QnIDncfFLHA3wVC72Ng-c80HSJ16Q,16296
stubs/mypy-extensions/mypy_extensions.pyi,sha256=LIU5CWrCyJ6G8xqMM_P3fztnO2y177WwSk0HXk_l-4M,9102

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.27.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1,239 @@
MIT License
Copyright (c) 2023 Jos Verlinde
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
Parts of this package are licensed under different licenses, reproduced below.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
The "typeshed" project is licensed under the terms of the Apache license, as
reproduced below.
= = = = =
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
= = = = =
Parts of typeshed are licensed under different licenses (like the MIT
license), reproduced below.
= = = = =
The MIT License
Copyright (c) 2015 Jukka Lehtosalo and contributors
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
= = = = =

View File

@@ -0,0 +1,237 @@
The "typeshed" project is licensed under the terms of the Apache license, as
reproduced below.
= = = = =
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
= = = = =
Parts of typeshed are licensed under different licenses (like the MIT
license), reproduced below.
= = = = =
The MIT License
Copyright (c) 2015 Jukka Lehtosalo and contributors
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
= = = = =

View File

@@ -0,0 +1,105 @@
Metadata-Version: 2.4
Name: mpremote
Version: 1.26.1
Summary: Tool for interacting remotely with MicroPython devices
Project-URL: Homepage, https://github.com/micropython/micropython
Author-email: Damien George <damien@micropython.org>
License: MIT
License-File: LICENSE
Keywords: hardware,micropython
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Software Development :: Embedded Systems
Classifier: Topic :: System :: Hardware
Requires-Python: >=3.4
Requires-Dist: importlib-metadata>=1.4; python_version < '3.8'
Requires-Dist: platformdirs>=4.3.7
Requires-Dist: pyserial>=3.3
Description-Content-Type: text/markdown
# mpremote -- MicroPython remote control
This CLI tool provides an integrated set of utilities to remotely interact with
and automate a MicroPython device over a serial connection.
The simplest way to use this tool is:
mpremote
This will automatically connect to a USB serial port and provide an interactive REPL.
The full list of supported commands is:
mpremote connect <device> -- connect to given device
device may be: list, auto, id:x, port:x
or any valid device name/path
mpremote disconnect -- disconnect current device
mpremote mount <local-dir> -- mount local directory on device
mpremote eval <string> -- evaluate and print the string
mpremote exec <string> -- execute the string
mpremote run <file> -- run the given local script
mpremote fs <command> <args...> -- execute filesystem commands on the device
command may be: cat, ls, cp, rm, mkdir, rmdir, sha256sum
use ":" as a prefix to specify a file on the device
mpremote repl -- enter REPL
options:
--capture <file>
--inject-code <string>
--inject-file <file>
mpremote mip install <package...> -- Install packages (from micropython-lib or third-party sources)
options:
--target <path>
--index <url>
--no-mpy
mpremote help -- print list of commands and exit
Multiple commands can be specified and they will be run sequentially. Connection
and disconnection will be done automatically at the start and end of the execution
of the tool, if such commands are not explicitly given. Automatic connection will
search for the first available serial device. If no action is specified then the
REPL will be entered.
Shortcuts can be defined using the macro system. Built-in shortcuts are:
- a0, a1, a2, a3: connect to `/dev/ttyACM?`
- u0, u1, u2, u3: connect to `/dev/ttyUSB?`
- c0, c1, c2, c3: connect to `COM?`
- cat, ls, cp, rm, mkdir, rmdir, df: filesystem commands
- reset: reset the device
- bootloader: make the device enter its bootloader
Any user configuration, including user-defined shortcuts, can be placed in
.config/mpremote/config.py. For example:
# Custom macro commands
commands = {
"c33": "connect id:334D335C3138",
"bl": "bootloader",
"double x=4": {
"command": "eval x*2",
"help": "multiply by two"
}
}
Examples:
mpremote
mpremote a1
mpremote connect /dev/ttyUSB0 repl
mpremote ls
mpremote a1 ls
mpremote exec "import micropython; micropython.mem_info()"
mpremote eval 1/2 eval 3/4
mpremote mount .
mpremote mount . exec "import local_script"
mpremote ls
mpremote cat boot.py
mpremote cp :main.py .
mpremote cp main.py :
mpremote cp -r dir/ :
mpremote sha256sum :main.py
mpremote mip install aioble
mpremote mip install github:org/repo@branch
mpremote mip install gitlab:org/repo@branch

View File

@@ -0,0 +1,30 @@
../../../bin/mpremote,sha256=bxRhc--5JOlFN19yFeyBH9VXJIkmYE0TRc6U9CnmTA4,294
mpremote-1.26.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
mpremote-1.26.1.dist-info/METADATA,sha256=XiPdewkwrkIupLuKevz-YxqgbKpoZgy1aj7CzhavNa8,4271
mpremote-1.26.1.dist-info/RECORD,,
mpremote-1.26.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
mpremote-1.26.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
mpremote-1.26.1.dist-info/entry_points.txt,sha256=sgvfZwY5hhV3pe7WBKbJX-A3K4rPSalISc2Et9J5488,48
mpremote-1.26.1.dist-info/licenses/LICENSE,sha256=t5Ak-wggyYkaKWW9RhUOoCxzdThAgY9_-bVQ0fauFuU,1088
mpremote/__init__.py,sha256=uCMjbbM9nO3VqYqqvGIZIzXy3juW1hHD8b4LrW0mWiQ,438
mpremote/__main__.py,sha256=4uulmJ59a37e2DlZJwGhd0wmwvK1KHTbeDnhBWqpFtQ,84
mpremote/__pycache__/__init__.cpython-312.pyc,,
mpremote/__pycache__/__main__.cpython-312.pyc,,
mpremote/__pycache__/commands.cpython-312.pyc,,
mpremote/__pycache__/console.cpython-312.pyc,,
mpremote/__pycache__/main.cpython-312.pyc,,
mpremote/__pycache__/mip.cpython-312.pyc,,
mpremote/__pycache__/mp_errno.cpython-312.pyc,,
mpremote/__pycache__/repl.cpython-312.pyc,,
mpremote/__pycache__/romfs.cpython-312.pyc,,
mpremote/__pycache__/transport.cpython-312.pyc,,
mpremote/__pycache__/transport_serial.cpython-312.pyc,,
mpremote/commands.py,sha256=qMNSaRwLm_HQJhPcIOzZubtqGgAU1Zq2M0z_CdXlRhs,26587
mpremote/console.py,sha256=doc59IfVkosiqlexQlZrLRsQmtzFb5hrm7q2HabKnMU,5280
mpremote/main.py,sha256=7JS0_t8fKCwISGX9k4vxojWEGFYqVla9U5A4PPzVF8I,19305
mpremote/mip.py,sha256=IRQeQ0jkYxvo0EPVCaEHVoP5xSZ8JPT8qDdVBoE-Pko,7380
mpremote/mp_errno.py,sha256=xlupFzO1KKSe_nN9ogUIUZzJsCJKcsf0TyUyCDG5bvw,1280
mpremote/repl.py,sha256=-qEslvMdWDfGgbglvIqdT6zgwbtXeK_m_x2pGvODflo,4679
mpremote/romfs.py,sha256=j-aCEzxWg4kryrRDgO_mETEyg2xpZyxOnpS1mcVDmbU,4966
mpremote/transport.py,sha256=rTtyBJDYfzTPpLkcAJ30avaql6Ak055ekR513SURza0,7608
mpremote/transport_serial.py,sha256=Q3jFpmZIUBi1RLDBbU83EjnMD3lcAN2O8696Q-6fNTg,33927

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: hatchling 1.27.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1,2 @@
[console_scripts]
mpremote = mpremote.main:main

View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2021-2022 Damien P. George
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@@ -0,0 +1,12 @@
try:
from importlib.metadata import version, PackageNotFoundError
try:
__version__ = version("mpremote")
except PackageNotFoundError:
# Error loading package version (e.g. running from source).
__version__ = "0.0.0-local"
except ImportError:
# importlib.metadata not available (e.g. CPython <3.8 without
# importlib_metadata compatibility package installed).
__version__ = "0.0.0-unknown"

View File

@@ -0,0 +1,6 @@
#!/usr/bin/env python3
import sys
from mpremote import main
sys.exit(main.main())

View File

@@ -0,0 +1,746 @@
import binascii
import errno
import hashlib
import os
import sys
import tempfile
import zlib
import serial.tools.list_ports
from .transport import TransportError, TransportExecError, stdout_write_bytes
from .transport_serial import SerialTransport
from .romfs import make_romfs, VfsRomWriter
class CommandError(Exception):
pass
def do_connect(state, args=None):
dev = args.device[0] if args else "auto"
do_disconnect(state)
try:
if dev == "list":
# List attached devices.
for p in sorted(serial.tools.list_ports.comports()):
print(
"{} {} {:04x}:{:04x} {} {}".format(
p.device,
p.serial_number,
p.vid if isinstance(p.vid, int) else 0,
p.pid if isinstance(p.pid, int) else 0,
p.manufacturer,
p.product,
)
)
# Don't do implicit REPL command.
state.did_action()
elif dev == "auto":
# Auto-detect and auto-connect to the first available USB serial port.
for p in sorted(serial.tools.list_ports.comports()):
if p.vid is not None and p.pid is not None:
try:
state.transport = SerialTransport(p.device, baudrate=115200)
return
except TransportError as er:
if not er.args[0].startswith("failed to access"):
raise er
raise TransportError("no device found")
elif dev.startswith("id:"):
# Search for a device with the given serial number.
serial_number = dev[len("id:") :]
dev = None
for p in serial.tools.list_ports.comports():
if p.serial_number == serial_number:
state.transport = SerialTransport(p.device, baudrate=115200)
return
raise TransportError("no device with serial number {}".format(serial_number))
else:
# Connect to the given device.
if dev.startswith("port:"):
dev = dev[len("port:") :]
state.transport = SerialTransport(dev, baudrate=115200)
return
except TransportError as er:
msg = er.args[0]
if msg.startswith("failed to access"):
msg += " (it may be in use by another program)"
raise CommandError(msg)
def do_disconnect(state, _args=None):
if not state.transport:
return
try:
if state.transport.mounted:
if not state.transport.in_raw_repl:
state.transport.enter_raw_repl(soft_reset=False)
state.transport.umount_local()
if state.transport.in_raw_repl:
state.transport.exit_raw_repl()
except OSError:
# Ignore any OSError exceptions when shutting down, eg:
# - filesystem_command will close the connection if it had an error
# - umounting will fail if serial port disappeared
pass
state.transport.close()
state.transport = None
state._auto_soft_reset = True
def show_progress_bar(size, total_size, op="copying"):
if not sys.stdout.isatty():
return
verbose_size = 2048
bar_length = 20
if total_size < verbose_size:
return
elif size >= total_size:
# Clear progress bar when copy completes
print("\r" + " " * (13 + len(op) + bar_length) + "\r", end="")
else:
bar = size * bar_length // total_size
progress = size * 100 // total_size
print(
"\r ... {} {:3d}% [{}{}]".format(op, progress, "#" * bar, "-" * (bar_length - bar)),
end="",
)
def _remote_path_join(a, *b):
if not a:
a = "./"
result = a.rstrip("/")
for x in b:
result += "/" + x.strip("/")
return result
def _remote_path_dirname(a):
a = a.rsplit("/", 1)
if len(a) == 1:
return ""
else:
return a[0]
def _remote_path_basename(a):
return a.rsplit("/", 1)[-1]
def do_filesystem_cp(state, src, dest, multiple, check_hash=False):
if dest.startswith(":"):
dest_no_slash = dest.rstrip("/" + os.path.sep + (os.path.altsep or ""))
dest_exists = state.transport.fs_exists(dest_no_slash[1:])
dest_isdir = dest_exists and state.transport.fs_isdir(dest_no_slash[1:])
# A trailing / on dest forces it to be a directory.
if dest != dest_no_slash:
if not dest_isdir:
raise CommandError("cp: destination is not a directory")
dest = dest_no_slash
else:
dest_exists = os.path.exists(dest)
dest_isdir = dest_exists and os.path.isdir(dest)
if multiple:
if not dest_exists:
raise CommandError("cp: destination does not exist")
if not dest_isdir:
raise CommandError("cp: destination is not a directory")
# Download the contents of source.
try:
if src.startswith(":"):
data = state.transport.fs_readfile(src[1:], progress_callback=show_progress_bar)
filename = _remote_path_basename(src[1:])
else:
with open(src, "rb") as f:
data = f.read()
filename = os.path.basename(src)
except IsADirectoryError:
raise CommandError("cp: -r not specified; omitting directory")
# Write back to dest.
if dest.startswith(":"):
# If the destination path is just the directory, then add the source filename.
if dest_isdir:
dest = ":" + _remote_path_join(dest[1:], filename)
# Skip copy if the destination file is identical.
if check_hash:
try:
remote_hash = state.transport.fs_hashfile(dest[1:], "sha256")
source_hash = hashlib.sha256(data).digest()
# remote_hash will be None if the device doesn't support
# hashlib.sha256 (and therefore won't match).
if remote_hash == source_hash:
print("Up to date:", dest[1:])
return
except OSError:
pass
# Write to remote.
state.transport.fs_writefile(dest[1:], data, progress_callback=show_progress_bar)
else:
# If the destination path is just the directory, then add the source filename.
if dest_isdir:
dest = os.path.join(dest, filename)
# Write to local file.
with open(dest, "wb") as f:
f.write(data)
def do_filesystem_recursive_cp(state, src, dest, multiple, check_hash):
# Ignore trailing / on both src and dest. (Unix cp ignores them too)
src = src.rstrip("/" + os.path.sep + (os.path.altsep if os.path.altsep else ""))
dest = dest.rstrip("/" + os.path.sep + (os.path.altsep if os.path.altsep else ""))
# If the destination directory exists, then we copy into it. Otherwise we
# use the destination as the target.
if dest.startswith(":"):
dest_exists = state.transport.fs_exists(dest[1:])
else:
dest_exists = os.path.exists(dest)
# Recursively find all files to copy from a directory.
# `dirs` will be a list of dest split paths.
# `files` will be a list of `(dest split path, src joined path)`.
dirs = []
files = []
# For example, if src=/tmp/foo, with /tmp/foo/x.py and /tmp/foo/a/b/c.py,
# and if the destination directory exists, then we will have:
# dirs = [['foo'], ['foo', 'a'], ['foo', 'a', 'b']]
# files = [(['foo', 'x.py'], '/tmp/foo/x.py'), (['foo', 'a', 'b', 'c.py'], '/tmp/foo/a/b/c.py')]
# If the destination doesn't exist, then we will have:
# dirs = [['a'], ['a', 'b']]
# files = [(['x.py'], '/tmp/foo/x.py'), (['a', 'b', 'c.py'], '/tmp/foo/a/b/c.py')]
def _list_recursive(base, src_path, dest_path, src_join_fun, src_isdir_fun, src_listdir_fun):
src_path_joined = src_join_fun(base, *src_path)
if src_isdir_fun(src_path_joined):
if dest_path:
dirs.append(dest_path)
for entry in src_listdir_fun(src_path_joined):
_list_recursive(
base,
src_path + [entry],
dest_path + [entry],
src_join_fun,
src_isdir_fun,
src_listdir_fun,
)
else:
files.append(
(
dest_path,
src_path_joined,
)
)
if src.startswith(":"):
src_dirname = [_remote_path_basename(src[1:])]
dest_dirname = src_dirname if dest_exists else []
_list_recursive(
_remote_path_dirname(src[1:]),
src_dirname,
dest_dirname,
src_join_fun=_remote_path_join,
src_isdir_fun=state.transport.fs_isdir,
src_listdir_fun=lambda p: [x.name for x in state.transport.fs_listdir(p)],
)
else:
src_dirname = [os.path.basename(src)]
dest_dirname = src_dirname if dest_exists else []
_list_recursive(
os.path.dirname(src),
src_dirname,
dest_dirname,
src_join_fun=os.path.join,
src_isdir_fun=os.path.isdir,
src_listdir_fun=os.listdir,
)
# If no directories were encountered then we must have just had a file.
if not dirs:
return do_filesystem_cp(state, src, dest, multiple, check_hash)
def _mkdir(a, *b):
try:
if a.startswith(":"):
state.transport.fs_mkdir(_remote_path_join(a[1:], *b))
else:
os.mkdir(os.path.join(a, *b))
except FileExistsError:
pass
# Create the destination if necessary.
if not dest_exists:
_mkdir(dest)
# Create all sub-directories relative to the destination.
for d in dirs:
_mkdir(dest, *d)
# Copy all files, in sorted order to help it be deterministic.
files.sort()
for dest_path_split, src_path_joined in files:
if src.startswith(":"):
src_path_joined = ":" + src_path_joined
if dest.startswith(":"):
dest_path_joined = ":" + _remote_path_join(dest[1:], *dest_path_split)
else:
dest_path_joined = os.path.join(dest, *dest_path_split)
do_filesystem_cp(state, src_path_joined, dest_path_joined, False, check_hash)
def do_filesystem_recursive_rm(state, path, args):
if state.transport.fs_isdir(path):
if state.transport.mounted:
r_cwd = state.transport.eval("os.getcwd()")
abs_path = os.path.normpath(
os.path.join(r_cwd, path) if not os.path.isabs(path) else path
)
if isinstance(state.transport, SerialTransport) and abs_path.startswith(
f"{SerialTransport.fs_hook_mount}/"
):
raise CommandError(
f"rm -r not permitted on {SerialTransport.fs_hook_mount} directory"
)
for entry in state.transport.fs_listdir(path):
do_filesystem_recursive_rm(state, _remote_path_join(path, entry.name), args)
if path:
try:
state.transport.fs_rmdir(path)
if args.verbose:
print(f"removed directory: '{path}'")
except OSError as e:
if e.errno != errno.EINVAL: # not vfs mountpoint
raise CommandError(
f"rm -r: cannot remove :{path} {os.strerror(e.errno) if e.errno else ''}"
) from e
if args.verbose:
print(f"skipped: '{path}' (vfs mountpoint)")
else:
state.transport.fs_rmfile(path)
if args.verbose:
print(f"removed: '{path}'")
def human_size(size, decimals=1):
for unit in ["B", "K", "M", "G", "T"]:
if size < 1024.0 or unit == "T":
break
size /= 1024.0
return f"{size:.{decimals}f}{unit}" if unit != "B" else f"{int(size)}"
def do_filesystem_tree(state, path, args):
"""Print a tree of the device's filesystem starting at path."""
connectors = ("├── ", "└── ")
def _tree_recursive(path, prefix=""):
entries = state.transport.fs_listdir(path)
entries.sort(key=lambda e: e.name)
for i, entry in enumerate(entries):
connector = connectors[1] if i == len(entries) - 1 else connectors[0]
is_dir = entry.st_mode & 0x4000 # Directory
size_str = ""
# Most MicroPython filesystems don't report st_size for directories; only show a size for files (or directories that report one) to reduce clutter.
if entry.st_size > 0 or not is_dir:
if args.size:
size_str = f"[{entry.st_size:>9}] "
elif args.human:
size_str = f"[{human_size(entry.st_size):>6}] "
print(f"{prefix}{connector}{size_str}{entry.name}")
if is_dir:
_tree_recursive(
_remote_path_join(path, entry.name),
prefix + (" " if i == len(entries) - 1 else ""),
)
if not path or path == ".":
path = state.transport.exec("import os;print(os.getcwd())").strip().decode("utf-8")
if not (path == "." or state.transport.fs_isdir(path)):
raise CommandError(f"tree: '{path}' is not a directory")
if args.verbose:
print(f":{path} on {state.transport.device_name}")
else:
print(f":{path}")
_tree_recursive(path)
def do_filesystem(state, args):
state.ensure_raw_repl()
state.did_action()
command = args.command[0]
paths = args.path
if command == "cat":
# Don't do verbose output for `cat` unless explicitly requested.
verbose = args.verbose is True
else:
verbose = args.verbose is not False
if command == "cp":
# Note: cp requires the user to specify local/remote explicitly via
# leading ':'.
# The last argument must be the destination.
if len(paths) <= 1:
raise CommandError("cp: missing destination path")
cp_dest = paths[-1]
paths = paths[:-1]
else:
# All other commands implicitly use remote paths. Strip the
# leading ':' if the user included them.
paths = [path[1:] if path.startswith(":") else path for path in paths]
# ls and tree implicitly list the cwd.
if command in ("ls", "tree") and not paths:
paths = [""]
try:
# Handle each path sequentially.
for path in paths:
if verbose:
if command == "cp":
print("{} {} {}".format(command, path, cp_dest))
else:
print("{} :{}".format(command, path))
if command == "cat":
state.transport.fs_printfile(path)
elif command == "ls":
for result in state.transport.fs_listdir(path):
print(
"{:12} {}{}".format(
result.st_size, result.name, "/" if result.st_mode & 0x4000 else ""
)
)
elif command == "mkdir":
state.transport.fs_mkdir(path)
elif command == "rm":
if args.recursive:
do_filesystem_recursive_rm(state, path, args)
else:
state.transport.fs_rmfile(path)
elif command == "rmdir":
state.transport.fs_rmdir(path)
elif command == "touch":
state.transport.fs_touchfile(path)
elif command.endswith("sum") and command[-4].isdigit():
digest = state.transport.fs_hashfile(path, command[:-3])
print(digest.hex())
elif command == "cp":
if args.recursive:
do_filesystem_recursive_cp(
state, path, cp_dest, len(paths) > 1, not args.force
)
else:
do_filesystem_cp(state, path, cp_dest, len(paths) > 1, not args.force)
elif command == "tree":
do_filesystem_tree(state, path, args)
except OSError as er:
raise CommandError("{}: {}: {}.".format(command, er.strerror, os.strerror(er.errno)))
except TransportError as er:
raise CommandError("Error with transport:\n{}".format(er.args[0]))
def do_edit(state, args):
state.ensure_raw_repl()
state.did_action()
if not os.getenv("EDITOR"):
raise CommandError("edit: $EDITOR not set")
for src in args.files:
src = src.lstrip(":")
dest_fd, dest = tempfile.mkstemp(suffix=os.path.basename(src))
try:
print("edit :%s" % (src,))
state.transport.fs_touchfile(src)
data = state.transport.fs_readfile(src, progress_callback=show_progress_bar)
with open(dest_fd, "wb") as f:
f.write(data)
if os.system('%s "%s"' % (os.getenv("EDITOR"), dest)) == 0:
with open(dest, "rb") as f:
state.transport.fs_writefile(
src, f.read(), progress_callback=show_progress_bar
)
finally:
os.unlink(dest)
def _do_execbuffer(state, buf, follow):
state.ensure_raw_repl()
state.did_action()
try:
state.transport.exec_raw_no_follow(buf)
if follow:
ret, ret_err = state.transport.follow(timeout=None, data_consumer=stdout_write_bytes)
if ret_err:
stdout_write_bytes(ret_err)
sys.exit(1)
except TransportError as er:
raise CommandError(er.args[0])
except KeyboardInterrupt:
sys.exit(1)
def do_exec(state, args):
_do_execbuffer(state, args.expr[0], args.follow)
def do_eval(state, args):
buf = "print(" + args.expr[0] + ")"
_do_execbuffer(state, buf, True)
def do_run(state, args):
filename = args.path[0]
try:
with open(filename, "rb") as f:
buf = f.read()
except OSError:
raise CommandError(f"could not read file '{filename}'")
_do_execbuffer(state, buf, args.follow)
def do_mount(state, args):
state.ensure_raw_repl()
path = args.path[0]
state.transport.mount_local(path, unsafe_links=args.unsafe_links)
print(f"Local directory {path} is mounted at /remote")
def do_umount(state, path):
state.ensure_raw_repl()
state.transport.umount_local()
def do_resume(state, _args=None):
state._auto_soft_reset = False
def do_soft_reset(state, _args=None):
state.ensure_raw_repl(soft_reset=True)
state.did_action()
def do_rtc(state, args):
state.ensure_raw_repl()
state.did_action()
state.transport.exec("import machine")
if args.set:
import datetime
now = datetime.datetime.now()
timetuple = "({}, {}, {}, {}, {}, {}, {}, {})".format(
now.year,
now.month,
now.day,
now.weekday(),
now.hour,
now.minute,
now.second,
now.microsecond,
)
state.transport.exec("machine.RTC().datetime({})".format(timetuple))
else:
print(state.transport.eval("machine.RTC().datetime()"))
def _do_romfs_query(state, args):
state.ensure_raw_repl()
state.did_action()
# Detect the romfs and get its associated device.
state.transport.exec("import vfs")
if not state.transport.eval("hasattr(vfs,'rom_ioctl')"):
print("ROMFS is not enabled on this device")
return
num_rom_partitions = state.transport.eval("vfs.rom_ioctl(1)")
if num_rom_partitions <= 0:
print("No ROMFS partitions available")
return
for rom_id in range(num_rom_partitions):
state.transport.exec(f"dev=vfs.rom_ioctl(2,{rom_id})")
has_object = state.transport.eval("hasattr(dev,'ioctl')")
if has_object:
rom_block_count = state.transport.eval("dev.ioctl(4,0)")
rom_block_size = state.transport.eval("dev.ioctl(5,0)")
rom_size = rom_block_count * rom_block_size
print(
f"ROMFS{rom_id} partition has size {rom_size} bytes ({rom_block_count} blocks of {rom_block_size} bytes each)"
)
else:
rom_size = state.transport.eval("len(dev)")
print(f"ROMFS{rom_id} partition has size {rom_size} bytes")
romfs = state.transport.eval("bytes(memoryview(dev)[:12])")
print(f" Raw contents: {romfs.hex(':')} ...")
if not romfs.startswith(b"\xd2\xcd\x31"):
print(" Not a valid ROMFS")
else:
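# The size that follows the 3-byte header is a base-128 varint: seven data bits
# per byte, most significant group first, with a clear top bit marking the last byte.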
size = 0
for value in romfs[3:]:
size = (size << 7) | (value & 0x7F)
if not value & 0x80:
break
print(f" ROMFS image size: {size}")
def _do_romfs_build(state, args):
state.did_action()
if args.path is None:
raise CommandError("romfs build: source path not given")
input_directory = args.path
if args.output is None:
output_file = input_directory + ".romfs"
else:
output_file = args.output
romfs = make_romfs(input_directory, mpy_cross=args.mpy)
print(f"Writing {len(romfs)} bytes to output file {output_file}")
with open(output_file, "wb") as f:
f.write(romfs)
def _do_romfs_deploy(state, args):
state.ensure_raw_repl()
state.did_action()
transport = state.transport
if args.path is None:
raise CommandError("romfs deploy: source path not given")
rom_id = args.partition
romfs_filename = args.path
# Read in or create the ROMFS filesystem image.
if os.path.isfile(romfs_filename) and romfs_filename.endswith((".img", ".romfs")):
with open(romfs_filename, "rb") as f:
romfs = f.read()
else:
romfs = make_romfs(romfs_filename, mpy_cross=args.mpy)
print(f"Image size is {len(romfs)} bytes")
# Detect the ROMFS partition and get its associated device.
state.transport.exec("import vfs")
if not state.transport.eval("hasattr(vfs,'rom_ioctl')"):
raise CommandError("ROMFS is not enabled on this device")
transport.exec(f"dev=vfs.rom_ioctl(2,{rom_id})")
if transport.eval("isinstance(dev,int) and dev<0"):
raise CommandError(f"ROMFS{rom_id} partition not found on device")
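# vfs.rom_ioctl(2, rom_id) returns either a block-device-like object (with ioctl and
# writeblocks methods) or a plain buffer; has_object selects between the two paths below.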
has_object = transport.eval("hasattr(dev,'ioctl')")
if has_object:
rom_block_count = transport.eval("dev.ioctl(4,0)")
rom_block_size = transport.eval("dev.ioctl(5,0)")
rom_size = rom_block_count * rom_block_size
print(
f"ROMFS{rom_id} partition has size {rom_size} bytes ({rom_block_count} blocks of {rom_block_size} bytes each)"
)
else:
rom_size = transport.eval("len(dev)")
print(f"ROMFS{rom_id} partition has size {rom_size} bytes")
# Check if ROMFS image is valid
if not romfs.startswith(VfsRomWriter.ROMFS_HEADER):
print("Invalid ROMFS image")
sys.exit(1)
# Check if ROMFS filesystem image will fit in the target partition.
if len(romfs) > rom_size:
print("ROMFS image is too big for the target partition")
sys.exit(1)
# Prepare ROMFS partition for writing.
print(f"Preparing ROMFS{rom_id} partition for writing")
transport.exec("import vfs\ntry:\n vfs.umount('/rom')\nexcept:\n pass")
chunk_size = 4096
if has_object:
for offset in range(0, len(romfs), rom_block_size):
transport.exec(f"dev.ioctl(6,{offset // rom_block_size})")
chunk_size = min(chunk_size, rom_block_size)
else:
rom_min_write = transport.eval(f"vfs.rom_ioctl(3,{rom_id},{len(romfs)})")
chunk_size = max(chunk_size, rom_min_write)
# Detect capabilities of the device to use the fastest method of transfer.
has_bytes_fromhex = transport.eval("hasattr(bytes,'fromhex')")
try:
transport.exec("from binascii import a2b_base64")
has_a2b_base64 = True
except TransportExecError:
has_a2b_base64 = False
try:
transport.exec("from io import BytesIO")
transport.exec("from deflate import DeflateIO,RAW")
has_deflate_io = True
except TransportExecError:
has_deflate_io = False
# Deploy the ROMFS filesystem image to the device.
for offset in range(0, len(romfs), chunk_size):
romfs_chunk = romfs[offset : offset + chunk_size]
romfs_chunk += bytes(chunk_size - len(romfs_chunk))
if has_deflate_io:
# Needs: binascii.a2b_base64, io.BytesIO, deflate.DeflateIO.
compressor = zlib.compressobj(wbits=-9)
romfs_chunk_compressed = compressor.compress(romfs_chunk)
romfs_chunk_compressed += compressor.flush()
buf = binascii.b2a_base64(romfs_chunk_compressed).strip()
transport.exec(f"buf=DeflateIO(BytesIO(a2b_base64({buf})),RAW,9).read()")
elif has_a2b_base64:
# Needs: binascii.a2b_base64.
buf = binascii.b2a_base64(romfs_chunk)
transport.exec(f"buf=a2b_base64({buf})")
elif has_bytes_fromhex:
# Needs: bytes.fromhex.
buf = romfs_chunk.hex()
transport.exec(f"buf=bytes.fromhex('{buf}')")
else:
# Needs nothing special.
transport.exec("buf=" + repr(romfs_chunk))
print(f"\rWriting at offset {offset}", end="")
if has_object:
transport.exec(
f"dev.writeblocks({offset // rom_block_size},buf,{offset % rom_block_size})"
)
else:
transport.exec(f"vfs.rom_ioctl(4,{rom_id},{offset},buf)")
# Complete writing.
if not has_object:
transport.eval(f"vfs.rom_ioctl(5,{rom_id})")
print()
print("ROMFS image deployed")
def do_romfs(state, args):
if args.command[0] == "query":
_do_romfs_query(state, args)
elif args.command[0] == "build":
_do_romfs_build(state, args)
elif args.command[0] == "deploy":
_do_romfs_deploy(state, args)
else:
raise CommandError(
f"romfs: '{args.command[0]}' is not a command; pass romfs --help for a list"
)
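The chunk loop in _do_romfs_deploy above prefers raw deflate plus base64 when the device provides deflate.DeflateIO and binascii.a2b_base64. A minimal host-only sketch of that encoding and its inverse, using just zlib and binascii (on the device the decode goes through DeflateIO(BytesIO(...), RAW, 9) instead):

import binascii, zlib

chunk = bytes(range(256)) * 16  # stand-in for one 4096-byte ROMFS chunk

# Host side, as in _do_romfs_deploy: raw deflate (9-bit window, no zlib header), then base64.
compressor = zlib.compressobj(wbits=-9)
payload = binascii.b2a_base64(compressor.compress(chunk) + compressor.flush()).strip()

# Reverse of the device-side decode, modelled here with zlib: base64-decode, then raw inflate.
assert zlib.decompress(binascii.a2b_base64(payload), wbits=-9) == chunk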

View File

@@ -0,0 +1,176 @@
import sys, time
try:
import select, termios
except ImportError:
termios = None
select = None
import msvcrt, signal
class ConsolePosix:
def __init__(self):
self.infd = sys.stdin.fileno()
self.infile = sys.stdin.buffer
self.outfile = sys.stdout.buffer
if hasattr(self.infile, "raw"):
self.infile = self.infile.raw
if hasattr(self.outfile, "raw"):
self.outfile = self.outfile.raw
self.orig_attr = termios.tcgetattr(self.infd)
def enter(self):
# attr is: [iflag, oflag, cflag, lflag, ispeed, ospeed, cc]
attr = termios.tcgetattr(self.infd)
attr[0] &= ~(
termios.BRKINT | termios.ICRNL | termios.INPCK | termios.ISTRIP | termios.IXON
)
attr[1] = 0
attr[2] = attr[2] & ~(termios.CSIZE | termios.PARENB) | termios.CS8
attr[3] = 0
attr[6][termios.VMIN] = 1
attr[6][termios.VTIME] = 0
termios.tcsetattr(self.infd, termios.TCSANOW, attr)
def exit(self):
termios.tcsetattr(self.infd, termios.TCSANOW, self.orig_attr)
def waitchar(self, pyb_serial):
# TODO pyb_serial might not have fd
select.select([self.infd, pyb_serial.fd], [], [])
def readchar(self):
res = select.select([self.infd], [], [], 0)
if res[0]:
return self.infile.read(1)
else:
return None
def write(self, buf):
self.outfile.write(buf)
class ConsoleWindows:
KEY_MAP = {
b"H": b"A", # UP
b"P": b"B", # DOWN
b"M": b"C", # RIGHT
b"K": b"D", # LEFT
b"G": b"H", # POS1
b"O": b"F", # END
b"Q": b"6~", # PGDN
b"I": b"5~", # PGUP
b"s": b"1;5D", # CTRL-LEFT,
b"t": b"1;5C", # CTRL-RIGHT,
b"\x8d": b"1;5A", # CTRL-UP,
b"\x91": b"1;5B", # CTRL-DOWN,
b"w": b"1;5H", # CTRL-POS1
b"u": b"1;5F", # CTRL-END
b"\x98": b"1;3A", # ALT-UP,
b"\xa0": b"1;3B", # ALT-DOWN,
b"\x9d": b"1;3C", # ALT-RIGHT,
b"\x9b": b"1;3D", # ALT-LEFT,
b"\x97": b"1;3H", # ALT-POS1,
b"\x9f": b"1;3F", # ALT-END,
b"S": b"3~", # DEL,
b"\x93": b"3;5~", # CTRL-DEL
b"R": b"2~", # INS
b"\x92": b"2;5~", # CTRL-INS
b"\x94": b"Z", # Ctrl-Tab = BACKTAB,
}
def __init__(self):
self.ctrl_c = 0
def _sigint_handler(self, signo, frame):
self.ctrl_c += 1
def enter(self):
signal.signal(signal.SIGINT, self._sigint_handler)
def exit(self):
signal.signal(signal.SIGINT, signal.SIG_DFL)
def inWaiting(self):
return 1 if self.ctrl_c or msvcrt.kbhit() else 0
def waitchar(self, pyb_serial):
while not (self.inWaiting() or pyb_serial.inWaiting()):
time.sleep(0.01)
def readchar(self):
if self.ctrl_c:
self.ctrl_c -= 1
return b"\x03"
if msvcrt.kbhit():
ch = msvcrt.getch()
while ch in b"\x00\xe0": # arrow or function key prefix?
if not msvcrt.kbhit():
return None
ch = msvcrt.getch() # second call returns the actual key code
try:
ch = b"\x1b[" + self.KEY_MAP[ch]
except KeyError:
return None
return ch
def write(self, buf):
buf = buf.decode() if isinstance(buf, bytes) else buf
sys.stdout.write(buf)
sys.stdout.flush()
# for b in buf:
# if isinstance(b, bytes):
# msvcrt.putch(b)
# else:
# msvcrt.putwch(b)
if termios:
Console = ConsolePosix
VT_ENABLED = True
else:
Console = ConsoleWindows
# Windows VT mode ( >= win10 only)
# https://bugs.python.org/msg291732
import ctypes, os
from ctypes import wintypes
kernel32 = ctypes.WinDLL("kernel32", use_last_error=True)
ERROR_INVALID_PARAMETER = 0x0057
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004
def _check_bool(result, func, args):
if not result:
raise ctypes.WinError(ctypes.get_last_error())
return args
LPDWORD = ctypes.POINTER(wintypes.DWORD)
kernel32.GetConsoleMode.errcheck = _check_bool
kernel32.GetConsoleMode.argtypes = (wintypes.HANDLE, LPDWORD)
kernel32.SetConsoleMode.errcheck = _check_bool
kernel32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def set_conout_mode(new_mode, mask=0xFFFFFFFF):
# don't assume StandardOutput is a console.
# open CONOUT$ instead
fdout = os.open("CONOUT$", os.O_RDWR)
try:
hout = msvcrt.get_osfhandle(fdout)
old_mode = wintypes.DWORD()
kernel32.GetConsoleMode(hout, ctypes.byref(old_mode))
mode = (new_mode & mask) | (old_mode.value & ~mask)
kernel32.SetConsoleMode(hout, mode)
return old_mode.value
finally:
os.close(fdout)
# def enable_vt_mode():
mode = mask = ENABLE_VIRTUAL_TERMINAL_PROCESSING
try:
set_conout_mode(mode, mask)
VT_ENABLED = True
except WindowsError:
VT_ENABLED = False
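ConsolePosix.enter() switches the controlling terminal into a raw-like mode and exit() restores the attributes captured in the constructor, so callers pair them in try/finally (as do_repl does in the repl module further below). A minimal standalone sketch of the same save/restore pattern, assuming a POSIX tty on stdin and using tty.setraw in place of the explicit flag editing above:

import sys, termios, tty

fd = sys.stdin.fileno()
saved = termios.tcgetattr(fd)  # what __init__ stores in orig_attr
try:
    tty.setraw(fd)  # rough equivalent of enter()
    key = sys.stdin.buffer.read(1)  # one unbuffered keystroke
finally:
    termios.tcsetattr(fd, termios.TCSANOW, saved)  # what exit() does
print("read:", key)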

View File

@@ -0,0 +1,636 @@
"""
MicroPython Remote - Interaction and automation tool for MicroPython
MIT license; Copyright (c) 2019-2022 Damien P. George
This program provides a set of utilities to interact with and automate a
MicroPython device over a serial connection. Commands supported are:
mpremote -- auto-detect, connect and enter REPL
mpremote <device-shortcut> -- connect to given device
mpremote connect <device> -- connect to given device
mpremote disconnect -- disconnect current device
mpremote mount <local-dir> -- mount local directory on device
mpremote eval <string> -- evaluate and print the string
mpremote exec <string> -- execute the string
mpremote run <script> -- run the given local script
mpremote fs <command> <args...> -- execute filesystem commands on the device
mpremote repl -- enter REPL
"""
import argparse
import os, sys, time
from collections.abc import Mapping
from textwrap import dedent
import platformdirs
from .commands import (
CommandError,
do_connect,
do_disconnect,
do_edit,
do_filesystem,
do_mount,
do_umount,
do_exec,
do_eval,
do_run,
do_resume,
do_rtc,
do_soft_reset,
do_romfs,
)
from .mip import do_mip
from .repl import do_repl
_PROG = "mpremote"
def do_sleep(state, args):
time.sleep(args.ms[0])
def do_help(state, _args=None):
def print_commands_help(cmds, help_key):
max_command_len = max(len(cmd) for cmd in cmds.keys())
for cmd in sorted(cmds.keys()):
help_message_lines = dedent(help_key(cmds[cmd])).split("\n")
help_message = help_message_lines[0]
for line in help_message_lines[1:]:
help_message = "{}\n{}{}".format(help_message, " " * (max_command_len + 4), line)
print(" ", cmd, " " * (max_command_len - len(cmd) + 2), help_message, sep="")
print(_PROG, "-- MicroPython remote control")
print("See https://docs.micropython.org/en/latest/reference/mpremote.html")
print("\nList of commands:")
print_commands_help(
_COMMANDS, lambda x: x[1]().description
) # extract description from argparse
print("\nList of shortcuts:")
print_commands_help(_command_expansions, lambda x: x[2]) # (args, sub, help_message)
sys.exit(0)
def do_version(state, _args=None):
from . import __version__
print(f"{_PROG} {__version__}")
sys.exit(0)
def _bool_flag(cmd_parser, name, short_name, default, description):
# In Python 3.9+ this can be replaced with argparse.BooleanOptionalAction.
group = cmd_parser.add_mutually_exclusive_group()
group.add_argument(
"--" + name,
"-" + short_name,
action="store_true",
default=default,
help=description,
)
group.add_argument(
"--no-" + name,
action="store_false",
dest=name,
)
def argparse_connect():
cmd_parser = argparse.ArgumentParser(description="connect to given device")
cmd_parser.add_argument(
"device", nargs=1, help="Either list, auto, id:x, port:x, or any valid device name/path"
)
return cmd_parser
def argparse_sleep():
cmd_parser = argparse.ArgumentParser(description="sleep before executing next command")
cmd_parser.add_argument("ms", nargs=1, type=float, help="milliseconds to sleep for")
return cmd_parser
def argparse_edit():
cmd_parser = argparse.ArgumentParser(description="edit files on the device")
cmd_parser.add_argument("files", nargs="+", help="list of remote paths")
return cmd_parser
def argparse_mount():
cmd_parser = argparse.ArgumentParser(description="mount local directory on device")
_bool_flag(
cmd_parser,
"unsafe-links",
"l",
False,
"follow symbolic links pointing outside of local directory",
)
cmd_parser.add_argument("path", nargs=1, help="local path to mount")
return cmd_parser
def argparse_repl():
cmd_parser = argparse.ArgumentParser(description="connect to given device")
_bool_flag(cmd_parser, "escape-non-printable", "e", False, "escape non-printable characters")
cmd_parser.add_argument(
"--capture",
type=str,
required=False,
help="saves a copy of the REPL session to the specified path",
)
cmd_parser.add_argument(
"--inject-code", type=str, required=False, help="code to be run when Ctrl-J is pressed"
)
cmd_parser.add_argument(
"--inject-file",
type=str,
required=False,
help="path to file to be run when Ctrl-K is pressed",
)
return cmd_parser
def argparse_eval():
cmd_parser = argparse.ArgumentParser(description="evaluate and print the string")
cmd_parser.add_argument("expr", nargs=1, help="expression to execute")
return cmd_parser
def argparse_exec():
cmd_parser = argparse.ArgumentParser(description="execute the string")
_bool_flag(
cmd_parser, "follow", "f", True, "follow output until the expression completes (default)"
)
cmd_parser.add_argument("expr", nargs=1, help="expression to execute")
return cmd_parser
def argparse_run():
cmd_parser = argparse.ArgumentParser(description="run the given local script")
_bool_flag(
cmd_parser, "follow", "f", True, "follow output until the script completes (default)"
)
cmd_parser.add_argument("path", nargs=1, help="path to script to execute")
return cmd_parser
def argparse_rtc():
cmd_parser = argparse.ArgumentParser(description="get (default) or set the device RTC")
_bool_flag(cmd_parser, "set", "s", False, "set the RTC to the current local time")
return cmd_parser
def argparse_filesystem():
cmd_parser = argparse.ArgumentParser(
description="execute filesystem commands on the device",
add_help=False,
)
cmd_parser.add_argument("--help", action="help", help="show this help message and exit")
_bool_flag(cmd_parser, "recursive", "r", False, "recursive (for cp and rm commands)")
_bool_flag(
cmd_parser,
"force",
"f",
False,
"force copy even if file is unchanged (for cp command only)",
)
_bool_flag(
cmd_parser,
"verbose",
"v",
None,
"enable verbose output (defaults to True for all commands except cat)",
)
size_group = cmd_parser.add_mutually_exclusive_group()
size_group.add_argument(
"--size",
"-s",
default=False,
action="store_true",
help="show file size in bytes (tree command only)",
)
size_group.add_argument(
"--human",
"-h",
default=False,
action="store_true",
help="show file size in a more human readable way (tree command only)",
)
cmd_parser.add_argument(
"command",
nargs=1,
help="filesystem command (e.g. cat, cp, sha256sum, ls, rm, rmdir, touch, tree)",
)
cmd_parser.add_argument("path", nargs="+", help="local and remote paths")
return cmd_parser
def argparse_mip():
cmd_parser = argparse.ArgumentParser(
description="install packages from micropython-lib or third-party sources"
)
_bool_flag(cmd_parser, "mpy", "m", True, "download as compiled .mpy files (default)")
cmd_parser.add_argument(
"--target", type=str, required=False, help="destination directory on the device"
)
cmd_parser.add_argument(
"--index",
type=str,
required=False,
help="package index to use (defaults to micropython-lib)",
)
cmd_parser.add_argument("command", nargs=1, help="mip command (e.g. install)")
cmd_parser.add_argument(
"packages",
nargs="+",
help="list package specifications, e.g. name, name@version, github:org/repo, github:org/repo@branch, gitlab:org/repo, gitlab:org/repo@branch",
)
return cmd_parser
def argparse_romfs():
cmd_parser = argparse.ArgumentParser(description="manage ROM partitions")
_bool_flag(
cmd_parser,
"mpy",
"m",
True,
"automatically compile .py files to .mpy when building the ROMFS image (default)",
)
cmd_parser.add_argument(
"--partition",
"-p",
type=int,
default=0,
help="ROMFS partition to use",
)
cmd_parser.add_argument(
"--output",
"-o",
help="output file",
)
cmd_parser.add_argument("command", nargs=1, help="romfs command, one of: query, build, deploy")
cmd_parser.add_argument("path", nargs="?", help="path to directory to deploy")
return cmd_parser
def argparse_none(description):
return lambda: argparse.ArgumentParser(description=description)
# Map of "command" to tuple of (handler_func, argparse_func).
_COMMANDS = {
"connect": (
do_connect,
argparse_connect,
),
"sleep": (
do_sleep,
argparse_sleep,
),
"disconnect": (
do_disconnect,
argparse_none("disconnect current device"),
),
"edit": (
do_edit,
argparse_edit,
),
"resume": (
do_resume,
argparse_none("resume a previous mpremote session (will not auto soft-reset)"),
),
"soft-reset": (
do_soft_reset,
argparse_none("perform a soft-reset of the device"),
),
"mount": (
do_mount,
argparse_mount,
),
"umount": (
do_umount,
argparse_none("unmount the local directory"),
),
"repl": (
do_repl,
argparse_repl,
),
"eval": (
do_eval,
argparse_eval,
),
"exec": (
do_exec,
argparse_exec,
),
"run": (
do_run,
argparse_run,
),
"rtc": (
do_rtc,
argparse_rtc,
),
"fs": (
do_filesystem,
argparse_filesystem,
),
"mip": (
do_mip,
argparse_mip,
),
"help": (
do_help,
argparse_none("print help and exit"),
),
"version": (
do_version,
argparse_none("print version and exit"),
),
"romfs": (
do_romfs,
argparse_romfs,
),
}
# Additional command aliases.
# The value can either be:
# - A command string.
# - A list of command strings, each command will be executed sequentially.
# - A dict with a "command" entry (a string or list, as above) and an optional "help" string.
_BUILTIN_COMMAND_EXPANSIONS = {
# Device connection shortcuts.
"devs": {
"command": "connect list",
"help": "list available serial ports",
},
# Filesystem shortcuts (use `cp` instead of `fs cp`).
"cat": "fs cat",
"cp": "fs cp",
"ls": "fs ls",
"mkdir": "fs mkdir",
"rm": "fs rm",
"rmdir": "fs rmdir",
"sha256sum": "fs sha256sum",
"touch": "fs touch",
"tree": "fs tree",
# Disk used/free.
"df": [
"exec",
"""
import os,vfs
_f = "{:<10}{:>9}{:>9}{:>9}{:>5} {}"
print(_f.format("filesystem", "size", "used", "avail", "use%", "mounted on"))
try:
_ms = vfs.mount()
except:
_ms = []
for _m in [""] + os.listdir("/"):
_m = "/" + _m
_s = os.stat(_m)
if _s[0] & 1 << 14:
_ms.append(("<unknown>",_m))
for _v,_p in _ms:
_s = os.statvfs(_p)
_sz = _s[0]*_s[2]
if _sz:
_av = _s[0]*_s[3]
_us = 100*(_sz-_av)//_sz
print(_f.format(str(_v), _sz, _sz-_av, _av, _us, _p))
""",
],
# Other shortcuts.
"reset": {
"command": [
"exec",
"--no-follow",
"import time, machine; time.sleep_ms(100); machine.reset()",
],
"help": "hard reset the device",
},
"bootloader": {
"command": [
"exec",
"--no-follow",
"import time, machine; time.sleep_ms(100); machine.bootloader()",
],
"help": "make the device enter its bootloader",
},
# Simple aliases.
"--help": "help",
"--version": "version",
}
# Add "a0", "a1", ..., "u0", "u1", ..., "c0", "c1", ... as aliases
# for "connect /dev/ttyACMn" (and /dev/ttyUSBn, COMn) etc.
for port_num in range(4):
for prefix, port in [("a", "/dev/ttyACM"), ("u", "/dev/ttyUSB"), ("c", "COM")]:
_BUILTIN_COMMAND_EXPANSIONS["{}{}".format(prefix, port_num)] = {
"command": "connect {}{}".format(port, port_num),
"help": 'connect to serial port "{}{}"'.format(port, port_num),
}
def load_user_config():
# Create empty config object.
config = __build_class__(lambda: None, "Config")()
config.commands = {}
# Get config file name.
path = platformdirs.user_config_dir(appname=_PROG, appauthor=False)
config_file = os.path.join(path, "config.py")
# Check if config file exists.
if not os.path.exists(config_file):
return config
# Exec the config file in its directory.
with open(config_file) as f:
config_data = f.read()
prev_cwd = os.getcwd()
os.chdir(path)
# Pass in the config path so that the config file can use it.
config.__dict__["config_path"] = path
config.__dict__["__file__"] = config_file
exec(config_data, config.__dict__)
os.chdir(prev_cwd)
return config
def prepare_command_expansions(config):
global _command_expansions
_command_expansions = {}
for command_set in (_BUILTIN_COMMAND_EXPANSIONS, config.commands):
for cmd, sub in command_set.items():
cmd = cmd.split()
if len(cmd) == 1:
args = ()
else:
args = tuple(c.split("=") for c in cmd[1:])
help_message = ""
if isinstance(sub, Mapping):
help_message = sub.get("help", "")
sub = sub["command"]
if isinstance(sub, str):
sub = sub.split()
_command_expansions[cmd[0]] = (args, sub, help_message)
def do_command_expansion(args):
def usage_error(cmd, exp_args, msg):
print(f"Command {cmd} {msg}; signature is:")
print(" ", cmd, " ".join("=".join(a) for a in exp_args))
sys.exit(1)
last_arg_idx = len(args)
pre = []
while args and args[0] in _command_expansions:
cmd = args.pop(0)
exp_args, exp_sub, _ = _command_expansions[cmd]
for exp_arg in exp_args:
if args and args[0] == "+":
break
exp_arg_name = exp_arg[0]
if args and "=" not in args[0]:
# Argument given without a name.
value = args.pop(0)
elif args and args[0].startswith(exp_arg_name + "="):
# Argument given with correct name.
value = args.pop(0).split("=", 1)[1]
else:
# No argument given, or argument given with a different name.
if len(exp_arg) == 1:
# Required argument (it has no default).
usage_error(cmd, exp_args, f"missing argument {exp_arg_name}")
else:
# Optional argument with a default.
value = exp_arg[1]
pre.append(f"{exp_arg_name}={value}")
args[0:0] = exp_sub
last_arg_idx = len(exp_sub)
if last_arg_idx < len(args) and "=" in args[last_arg_idx]:
# Extra unknown arguments given.
arg = args[last_arg_idx].split("=", 1)[0]
usage_error(cmd, exp_args, f"given unexpected argument {arg}")
# Insert expansion with optional setting of arguments.
if pre:
args[0:0] = ["exec", ";".join(pre)]
class State:
def __init__(self):
self.transport = None
self._did_action = False
self._auto_soft_reset = True
def did_action(self):
self._did_action = True
def run_repl_on_completion(self):
return not self._did_action
def ensure_connected(self):
if self.transport is None:
do_connect(self)
def ensure_raw_repl(self, soft_reset=None):
self.ensure_connected()
soft_reset = self._auto_soft_reset if soft_reset is None else soft_reset
if soft_reset or not self.transport.in_raw_repl:
self.transport.enter_raw_repl(soft_reset=soft_reset)
self._auto_soft_reset = False
def ensure_friendly_repl(self):
self.ensure_connected()
if self.transport.in_raw_repl:
self.transport.exit_raw_repl()
def main():
config = load_user_config()
prepare_command_expansions(config)
remaining_args = sys.argv[1:]
state = State()
try:
while remaining_args:
# Skip the terminator.
if remaining_args[0] == "+":
remaining_args.pop(0)
continue
# Rewrite the front of the list with any matching expansion.
do_command_expansion(remaining_args)
# The (potentially rewritten) command must now be a base command.
cmd = remaining_args.pop(0)
try:
handler_func, parser_func = _COMMANDS[cmd]
except KeyError:
raise CommandError(f"'{cmd}' is not a command")
# If this command (or any down the chain) has a terminator, then
# limit the arguments passed for this command. They will be added
# back after processing this command.
try:
terminator = remaining_args.index("+")
command_args = remaining_args[:terminator]
extra_args = remaining_args[terminator:]
except ValueError:
command_args = remaining_args
extra_args = []
# Special case: "fs ls" and "fs tree" can have only options and no path specified.
if (
cmd == "fs"
and len(command_args) >= 1
and command_args[0] in ("ls", "tree")
and sum(1 for a in command_args if not a.startswith("-")) == 1
):
command_args.append("")
# Use the command-specific argument parser.
cmd_parser = parser_func()
cmd_parser.prog = cmd
# Catch all for unhandled positional arguments (this is the next command).
cmd_parser.add_argument(
"next_command", nargs=argparse.REMAINDER, help=f"Next {_PROG} command"
)
args = cmd_parser.parse_args(command_args)
# Execute command.
handler_func(state, args)
# Get any leftover unprocessed args.
remaining_args = args.next_command + extra_args
# If no commands were "actions" then implicitly finish with the REPL
# using default args.
if state.run_repl_on_completion():
disconnected = do_repl(state, argparse_repl().parse_args([]))
# Handle disconnection message
if disconnected:
print("\ndevice disconnected")
return 0
except CommandError as e:
# Make sure existing stdout appears before the error message on stderr.
sys.stdout.flush()
print(f"{_PROG}: {e}", file=sys.stderr)
sys.stderr.flush()
return 1
finally:
do_disconnect(state)
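load_user_config() and prepare_command_expansions() mean user-defined shortcuts come from a config.py found via platformdirs.user_config_dir("mpremote") (typically ~/.config/mpremote/config.py on Linux); only its `commands` dict is read. An illustrative config.py, with arbitrary example alias names:

# config.py -- read by load_user_config(); only the `commands` dict is consulted.
commands = {
    # Plain string alias: split on whitespace and substituted in place.
    "ll": "fs ls",
    # Dict form: an explicit command (a list is not re-split) plus help text for `mpremote help`.
    "implementation": {
        "command": ["exec", "import sys; print(sys.implementation)"],
        "help": "print sys.implementation of the attached device",
    },
}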

View File

@@ -0,0 +1,219 @@
# Micropython package installer
# Ported from micropython-lib/micropython/mip/mip.py.
# MIT license; Copyright (c) 2022 Jim Mussared
import urllib.error
import urllib.request
import json
import tempfile
import os
import os.path
from .commands import CommandError, show_progress_bar
_PACKAGE_INDEX = "https://micropython.org/pi/v2"
allowed_mip_url_prefixes = ("http://", "https://", "github:", "gitlab:")
# This implements os.makedirs(os.path.dirname(path)) for the remote filesystem.
def _ensure_path_exists(transport, path):
split = path.split("/")
# Handle paths starting with "/".
if not split[0]:
split.pop(0)
split[0] = "/" + split[0]
prefix = ""
for i in range(len(split) - 1):
prefix += split[i]
if not transport.fs_exists(prefix):
transport.fs_mkdir(prefix)
prefix += "/"
# Check if the specified path exists and matches the hash.
def _check_exists(transport, path, short_hash):
try:
remote_hash = transport.fs_hashfile(path, "sha256")
except FileNotFoundError:
return False
return remote_hash.hex()[: len(short_hash)] == short_hash
def _rewrite_url(url, branch=None):
if not branch:
branch = "HEAD"
if url.startswith("github:"):
url = url[7:].split("/")
url = (
"https://raw.githubusercontent.com/"
+ url[0]
+ "/"
+ url[1]
+ "/"
+ branch
+ "/"
+ "/".join(url[2:])
)
elif url.startswith("gitlab:"):
url = url[7:].split("/")
url = (
"https://gitlab.com/"
+ url[0]
+ "/"
+ url[1]
+ "/-/raw/"
+ branch
+ "/"
+ "/".join(url[2:])
)
return url
def _download_file(transport, url, dest):
if url.startswith(allowed_mip_url_prefixes):
try:
with urllib.request.urlopen(url) as src:
data = src.read()
except urllib.error.HTTPError as e:
if e.status == 404:
raise CommandError(f"File not found: {url}")
else:
raise CommandError(f"Error {e.status} requesting {url}")
except urllib.error.URLError as e:
raise CommandError(f"{e.reason} requesting {url}")
else:
if "\\" in url:
raise CommandError(f'Use "/" instead of "\\" in file URLs: {url!r}\n')
try:
with open(url, "rb") as f:
data = f.read()
except OSError as e:
raise CommandError(f"{e.strerror} opening {url}")
print("Installing:", dest)
_ensure_path_exists(transport, dest)
transport.fs_writefile(dest, data, progress_callback=show_progress_bar)
def _install_json(transport, package_json_url, index, target, version, mpy):
base_url = ""
if package_json_url.startswith(allowed_mip_url_prefixes):
try:
with urllib.request.urlopen(_rewrite_url(package_json_url, version)) as response:
package_json = json.load(response)
except urllib.error.HTTPError as e:
if e.status == 404:
raise CommandError(f"Package not found: {package_json_url}")
else:
raise CommandError(f"Error {e.status} requesting {package_json_url}")
except urllib.error.URLError as e:
raise CommandError(f"{e.reason} requesting {package_json_url}")
base_url = package_json_url.rpartition("/")[0]
elif package_json_url.endswith(".json"):
try:
with open(package_json_url, "r") as f:
package_json = json.load(f)
except OSError:
raise CommandError(f"Error opening {package_json_url}")
base_url = os.path.dirname(package_json_url)
else:
raise CommandError(f"Invalid url for package: {package_json_url}")
for target_path, short_hash in package_json.get("hashes", ()):
fs_target_path = target + "/" + target_path
if _check_exists(transport, fs_target_path, short_hash):
print("Exists:", fs_target_path)
else:
file_url = f"{index}/file/{short_hash[:2]}/{short_hash}"
_download_file(transport, file_url, fs_target_path)
for target_path, url in package_json.get("urls", ()):
fs_target_path = target + "/" + target_path
if base_url and not url.startswith(allowed_mip_url_prefixes):
url = f"{base_url}/{url}" # Relative URLs
_download_file(transport, _rewrite_url(url, version), fs_target_path)
for dep, dep_version in package_json.get("deps", ()):
_install_package(transport, dep, index, target, dep_version, mpy)
def _install_package(transport, package, index, target, version, mpy):
if package.startswith(allowed_mip_url_prefixes):
if package.endswith(".py") or package.endswith(".mpy"):
print(f"Downloading {package} to {target}")
_download_file(
transport, _rewrite_url(package, version), target + "/" + package.rsplit("/")[-1]
)
return
else:
if not package.endswith(".json"):
if not package.endswith("/"):
package += "/"
package += "package.json"
print(f"Installing {package} to {target}")
elif package.endswith(".json"):
pass
else:
if not version:
version = "latest"
print(f"Installing {package} ({version}) from {index} to {target}")
mpy_version = "py"
if mpy:
transport.exec("import sys")
mpy_version = transport.eval("getattr(sys.implementation, '_mpy', 0) & 0xFF") or "py"
package = f"{index}/package/{mpy_version}/{package}/{version}.json"
_install_json(transport, package, index, target, version, mpy)
def do_mip(state, args):
state.did_action()
if args.command[0] == "install":
state.ensure_raw_repl()
for package in args.packages:
version = None
if "@" in package:
package, version = package.split("@")
print("Install", package)
if args.index is None:
args.index = _PACKAGE_INDEX
if args.target is None:
state.transport.exec("import sys")
lib_paths = [
p
for p in state.transport.eval("sys.path")
if not p.startswith("/rom") and p.endswith("/lib")
]
if lib_paths and lib_paths[0]:
args.target = lib_paths[0]
else:
raise CommandError(
"Unable to find lib dir in sys.path, use --target to override"
)
if args.mpy is None:
args.mpy = True
try:
_install_package(
state.transport,
package,
args.index.rstrip("/"),
args.target,
version,
args.mpy,
)
except CommandError:
print("Package may be partially installed")
raise
print("Done")
else:
raise CommandError(f"mip: '{args.command[0]}' is not a command")
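For reference, the github:/gitlab: shorthand accepted above is expanded into raw-content URLs by _rewrite_url. The same transformation restated standalone, with placeholder org/repo/path names:

def rewrite_github(url, branch="HEAD"):
    # Same string surgery as _rewrite_url performs for the github: case.
    org, repo, *rest = url[len("github:"):].split("/")
    return "https://raw.githubusercontent.com/{}/{}/{}/{}".format(org, repo, branch, "/".join(rest))

assert (rewrite_github("github:org/repo/pkg/mod.py")
        == "https://raw.githubusercontent.com/org/repo/HEAD/pkg/mod.py")
# gitlab:org/repo/pkg/mod.py maps to https://gitlab.com/org/repo/-/raw/HEAD/pkg/mod.py in the same way.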

View File

@@ -0,0 +1,55 @@
import errno
import platform
# This table maps numeric values defined by `py/mperrno.h` to host errno codes.
MP_ERRNO_TABLE = {
1: errno.EPERM,
2: errno.ENOENT,
3: errno.ESRCH,
4: errno.EINTR,
5: errno.EIO,
6: errno.ENXIO,
7: errno.E2BIG,
8: errno.ENOEXEC,
9: errno.EBADF,
10: errno.ECHILD,
11: errno.EAGAIN,
12: errno.ENOMEM,
13: errno.EACCES,
14: errno.EFAULT,
16: errno.EBUSY,
17: errno.EEXIST,
18: errno.EXDEV,
19: errno.ENODEV,
20: errno.ENOTDIR,
21: errno.EISDIR,
22: errno.EINVAL,
23: errno.ENFILE,
24: errno.EMFILE,
25: errno.ENOTTY,
26: errno.ETXTBSY,
27: errno.EFBIG,
28: errno.ENOSPC,
29: errno.ESPIPE,
30: errno.EROFS,
31: errno.EMLINK,
32: errno.EPIPE,
33: errno.EDOM,
34: errno.ERANGE,
95: errno.EOPNOTSUPP,
97: errno.EAFNOSUPPORT,
98: errno.EADDRINUSE,
103: errno.ECONNABORTED,
104: errno.ECONNRESET,
105: errno.ENOBUFS,
106: errno.EISCONN,
107: errno.ENOTCONN,
110: errno.ETIMEDOUT,
111: errno.ECONNREFUSED,
113: errno.EHOSTUNREACH,
114: errno.EALREADY,
115: errno.EINPROGRESS,
125: errno.ECANCELED,
}
if platform.system() != "Windows":
MP_ERRNO_TABLE[15] = errno.ENOTBLK
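The table is consulted by _convert_filesystem_error in the transport module below when a device reports an OSError by number only; reusing the table and the errno import above:

import re

# A traceback line such as "OSError: 2" from the device maps to the host's ENOENT.
match = re.match(r"OSError: (\d+)$", "OSError: 2")
assert MP_ERRNO_TABLE[int(match.group(1))] == errno.ENOENT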

View File

@@ -0,0 +1,121 @@
from .console import Console, ConsolePosix
from .transport import TransportError
def do_repl_main_loop(
state, console_in, console_out_write, *, escape_non_printable, code_to_inject, file_to_inject
):
while True:
try:
console_in.waitchar(state.transport.serial)
c = console_in.readchar()
if c:
if c in (b"\x1d", b"\x18"): # ctrl-] or ctrl-x, quit
break
elif c == b"\x04": # ctrl-D
# special handling needed for ctrl-D if filesystem is mounted
state.transport.write_ctrl_d(console_out_write)
elif c == b"\x0a" and code_to_inject is not None: # ctrl-j, inject code
state.transport.serial.write(code_to_inject)
elif c == b"\x0b" and file_to_inject is not None: # ctrl-k, inject script
console_out_write(bytes("Injecting %s\r\n" % file_to_inject, "utf8"))
state.transport.enter_raw_repl(soft_reset=False)
with open(file_to_inject, "rb") as f:
pyfile = f.read()
try:
state.transport.exec_raw_no_follow(pyfile)
except TransportError as er:
console_out_write(b"Error:\r\n")
console_out_write(bytes(str(er), "utf8"))
state.transport.exit_raw_repl()
else:
state.transport.serial.write(c)
n = state.transport.serial.inWaiting()
if n > 0:
dev_data_in = state.transport.serial.read(n)
if dev_data_in is not None:
if escape_non_printable:
# Pass data through to the console, with escaping of non-printables.
console_data_out = bytearray()
for c in dev_data_in:
if c in (8, 9, 10, 13, 27) or 32 <= c <= 126:
console_data_out.append(c)
else:
console_data_out.extend(b"[%02x]" % c)
else:
console_data_out = dev_data_in
console_out_write(console_data_out)
except OSError as er:
if _is_disconnect_exception(er):
return True
else:
raise
return False
def do_repl(state, args):
state.ensure_friendly_repl()
state.did_action()
escape_non_printable = args.escape_non_printable
capture_file = args.capture
code_to_inject = args.inject_code
file_to_inject = args.inject_file
print("Connected to MicroPython at %s" % state.transport.device_name)
print("Use Ctrl-] or Ctrl-x to exit this shell")
if escape_non_printable:
print("Escaping non-printable bytes/characters by printing their hex code")
if capture_file is not None:
print('Capturing session to file "%s"' % capture_file)
capture_file = open(capture_file, "wb")
if code_to_inject is not None:
code_to_inject = bytes(code_to_inject.replace("\\n", "\r\n"), "utf8")
print("Use Ctrl-J to inject", code_to_inject)
if file_to_inject is not None:
print('Use Ctrl-K to inject file "%s"' % file_to_inject)
console = Console()
console.enter()
def console_out_write(b):
console.write(b)
if capture_file is not None:
capture_file.write(b)
capture_file.flush()
try:
return do_repl_main_loop(
state,
console,
console_out_write,
escape_non_printable=escape_non_printable,
code_to_inject=code_to_inject,
file_to_inject=file_to_inject,
)
finally:
console.exit()
if capture_file is not None:
capture_file.close()
def _is_disconnect_exception(exception):
"""
Check if an exception indicates device disconnect.
Returns True if the exception indicates the device has disconnected,
False otherwise.
"""
if isinstance(exception, OSError):
if hasattr(exception, "args") and len(exception.args) > 0:
# IO error, device disappeared
if exception.args[0] == 5:
return True
# Check for common disconnect messages in the exception string
exception_str = str(exception)
disconnect_indicators = ["Write timeout", "Device disconnected", "ClearCommError failed"]
return any(indicator in exception_str for indicator in disconnect_indicators)
return False
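With --escape-non-printable, the main loop above rewrites every byte outside a small whitelist (backspace, tab, LF, CR, ESC and printable ASCII) as its hex code in brackets. The same transformation in isolation:

def escape_non_printable(data: bytes) -> bytes:
    out = bytearray()
    for c in data:
        if c in (8, 9, 10, 13, 27) or 32 <= c <= 126:
            out.append(c)
        else:
            out.extend(b"[%02x]" % c)
    return bytes(out)

assert escape_non_printable(b"ok\x07\xff\r\n") == b"ok[07][ff]\r\n"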

View File

@@ -0,0 +1,148 @@
# MIT license; Copyright (c) 2022 Damien P. George
import struct, sys, os
try:
from mpy_cross import run as mpy_cross_run
except ImportError:
mpy_cross_run = None
class VfsRomWriter:
ROMFS_HEADER = b"\xd2\xcd\x31"
ROMFS_RECORD_KIND_UNUSED = 0
ROMFS_RECORD_KIND_PADDING = 1
ROMFS_RECORD_KIND_DATA_VERBATIM = 2
ROMFS_RECORD_KIND_DATA_POINTER = 3
ROMFS_RECORD_KIND_DIRECTORY = 4
ROMFS_RECORD_KIND_FILE = 5
def __init__(self):
self._dir_stack = [(None, bytearray())]
def _encode_uint(self, value):
encoded = [value & 0x7F]
value >>= 7
while value != 0:
encoded.insert(0, 0x80 | (value & 0x7F))
value >>= 7
return bytes(encoded)
def _pack(self, kind, payload):
return self._encode_uint(kind) + self._encode_uint(len(payload)) + payload
def _extend(self, data):
buf = self._dir_stack[-1][1]
buf.extend(data)
return len(buf)
def finalise(self):
_, data = self._dir_stack.pop()
encoded_kind = VfsRomWriter.ROMFS_HEADER
encoded_len = self._encode_uint(len(data))
if (len(encoded_kind) + len(encoded_len) + len(data)) % 2 == 1:
encoded_len = b"\x80" + encoded_len
data = encoded_kind + encoded_len + data
return data
def opendir(self, dirname):
self._dir_stack.append((dirname, bytearray()))
def closedir(self):
dirname, dirdata = self._dir_stack.pop()
dirdata = self._encode_uint(len(dirname)) + bytes(dirname, "ascii") + dirdata
self._extend(self._pack(VfsRomWriter.ROMFS_RECORD_KIND_DIRECTORY, dirdata))
def mkdata(self, data):
assert len(self._dir_stack) == 1
return self._extend(self._pack(VfsRomWriter.ROMFS_RECORD_KIND_DATA_VERBATIM, data)) - len(
data
)
def mkfile(self, filename, filedata):
filename = bytes(filename, "ascii")
payload = self._encode_uint(len(filename))
payload += filename
if isinstance(filedata, tuple):
sub_payload = self._encode_uint(filedata[0])
sub_payload += self._encode_uint(filedata[1])
payload += self._pack(VfsRomWriter.ROMFS_RECORD_KIND_DATA_POINTER, sub_payload)
else:
payload += self._pack(VfsRomWriter.ROMFS_RECORD_KIND_DATA_VERBATIM, filedata)
self._dir_stack[-1][1].extend(self._pack(VfsRomWriter.ROMFS_RECORD_KIND_FILE, payload))
def copy_recursively(vfs, src_dir, print_prefix, mpy_cross):
assert src_dir.endswith("/")
DIR = 1 << 14
mpy_cross_missed = 0
dir_contents = sorted(os.listdir(src_dir))
for name in dir_contents:
src_name = src_dir + name
st = os.stat(src_name)
if name == dir_contents[-1]:
# Last entry in the directory listing.
print_entry = "\\--"
print_recurse = " "
else:
# Not the last entry in the directory listing.
print_entry = "|--"
print_recurse = "| "
if st[0] & DIR:
# A directory, enter it and copy its contents recursively.
print(print_prefix + print_entry, name + "/")
vfs.opendir(name)
mpy_cross_missed += copy_recursively(
vfs, src_name + "/", print_prefix + print_recurse, mpy_cross
)
vfs.closedir()
else:
# A file.
did_mpy = False
name_extra = ""
if mpy_cross and name.endswith(".py"):
name_mpy = name[:-3] + ".mpy"
src_name_mpy = src_dir + name_mpy
if not os.path.isfile(src_name_mpy):
if mpy_cross_run is not None:
did_mpy = True
proc = mpy_cross_run(src_name)
proc.wait()
else:
mpy_cross_missed += 1
if did_mpy:
name_extra = " -> .mpy"
print(print_prefix + print_entry, name + name_extra)
if did_mpy:
name = name_mpy
src_name = src_name_mpy
with open(src_name, "rb") as src:
vfs.mkfile(name, src.read())
if did_mpy:
os.remove(src_name_mpy)
return mpy_cross_missed
def make_romfs(src_dir, *, mpy_cross):
if not src_dir.endswith("/"):
src_dir += "/"
vfs = VfsRomWriter()
# Build the filesystem recursively.
print("Building romfs filesystem, source directory: {}".format(src_dir))
print("/")
try:
mpy_cross_missed = copy_recursively(vfs, src_dir, "", mpy_cross)
except OSError as er:
print("Error: OSError {}".format(er), file=sys.stderr)
sys.exit(1)
if mpy_cross_missed:
print("Warning: `mpy_cross` module not found, .py files were not precompiled")
mpy_cross = False
return vfs.finalise()
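VfsRomWriter._encode_uint stores lengths as a base-128 varint, most significant group first, with the top bit set on every byte except the last; the `romfs query` command decodes the same format. A standalone round-trip check against the writer above:

def decode_uint(buf):
    # Inverse of VfsRomWriter._encode_uint, using the same loop as `romfs query`.
    value = 0
    for b in buf:
        value = (value << 7) | (b & 0x7F)
        if not b & 0x80:
            break
    return value

writer = VfsRomWriter()
for n in (0, 1, 127, 128, 300, 70000):
    assert decode_uint(writer._encode_uint(n)) == n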

View File

@@ -0,0 +1,211 @@
#!/usr/bin/env python3
#
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2023 Jim Mussared
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import ast, errno, hashlib, os, re, sys
from collections import namedtuple
from .mp_errno import MP_ERRNO_TABLE
def stdout_write_bytes(b):
b = b.replace(b"\x04", b"")
if hasattr(sys.stdout, "buffer"):
sys.stdout.buffer.write(b)
sys.stdout.buffer.flush()
else:
text = b.decode(sys.stdout.encoding, "strict")
sys.stdout.write(text)
class TransportError(Exception):
pass
class TransportExecError(TransportError):
def __init__(self, status_code, error_output):
self.status_code = status_code
self.error_output = error_output
super().__init__(error_output)
listdir_result = namedtuple("dir_result", ["name", "st_mode", "st_ino", "st_size"])
# Takes a Transport error (containing the text of an OSError traceback) and
# raises it as the corresponding OSError-derived exception.
def _convert_filesystem_error(e, info):
if "OSError" in e.error_output:
for code, estr in [
*errno.errorcode.items(),
(errno.EOPNOTSUPP, "EOPNOTSUPP"),
]:
if estr in e.error_output:
return OSError(code, info)
# Some targets don't render OSError with the name of the errno, so in these
# cases support an explicit mapping of errnos to known numeric codes.
error_lines = e.error_output.splitlines()
match = re.match(r"OSError: (\d+)$", error_lines[-1])
if match:
value = int(match.group(1), 10)
if value in MP_ERRNO_TABLE:
return OSError(MP_ERRNO_TABLE[value], info)
return e
class Transport:
def fs_listdir(self, src=""):
buf = bytearray()
def repr_consumer(b):
buf.extend(b.replace(b"\x04", b""))
cmd = "import os\nfor f in os.ilistdir(%s):\n print(repr(f), end=',')" % (
("'%s'" % src) if src else ""
)
try:
buf.extend(b"[")
self.exec(cmd, data_consumer=repr_consumer)
buf.extend(b"]")
except TransportExecError as e:
raise _convert_filesystem_error(e, src) from None
return [
listdir_result(*f) if len(f) == 4 else listdir_result(*(f + (0,)))
for f in ast.literal_eval(buf.decode())
]
def fs_stat(self, src):
try:
self.exec("import os")
return os.stat_result(self.eval("os.stat(%s)" % ("'%s'" % src)))
except TransportExecError as e:
raise _convert_filesystem_error(e, src) from None
def fs_exists(self, src):
try:
self.fs_stat(src)
return True
except OSError:
return False
def fs_isdir(self, src):
try:
mode = self.fs_stat(src).st_mode
return (mode & 0x4000) != 0
except OSError:
# Match CPython, a non-existent path is not a directory.
return False
def fs_printfile(self, src, chunk_size=256):
cmd = (
"with open('%s') as f:\n while 1:\n"
" b=f.read(%u)\n if not b:break\n print(b,end='')" % (src, chunk_size)
)
try:
self.exec(cmd, data_consumer=stdout_write_bytes)
except TransportExecError as e:
raise _convert_filesystem_error(e, src) from None
def fs_readfile(self, src, chunk_size=256, progress_callback=None):
if progress_callback:
src_size = self.fs_stat(src).st_size
contents = bytearray()
try:
self.exec("f=open('%s','rb')\nr=f.read" % src)
while True:
chunk = self.eval("r({})".format(chunk_size))
if not chunk:
break
contents.extend(chunk)
if progress_callback:
progress_callback(len(contents), src_size)
self.exec("f.close()")
except TransportExecError as e:
raise _convert_filesystem_error(e, src) from None
return contents
def fs_writefile(self, dest, data, chunk_size=256, progress_callback=None):
if progress_callback:
src_size = len(data)
written = 0
try:
self.exec("f=open('%s','wb')\nw=f.write" % dest)
while data:
chunk = data[:chunk_size]
self.exec("w(" + repr(chunk) + ")")
data = data[len(chunk) :]
if progress_callback:
written += len(chunk)
progress_callback(written, src_size)
self.exec("f.close()")
except TransportExecError as e:
raise _convert_filesystem_error(e, dest) from None
def fs_mkdir(self, path):
try:
self.exec("import os\nos.mkdir('%s')" % path)
except TransportExecError as e:
raise _convert_filesystem_error(e, path) from None
def fs_rmdir(self, path):
try:
self.exec("import os\nos.rmdir('%s')" % path)
except TransportExecError as e:
raise _convert_filesystem_error(e, path) from None
def fs_rmfile(self, path):
try:
self.exec("import os\nos.remove('%s')" % path)
except TransportExecError as e:
raise _convert_filesystem_error(e, path) from None
def fs_touchfile(self, path):
try:
self.exec("f=open('%s','a')\nf.close()" % path)
except TransportExecError as e:
raise _convert_filesystem_error(e, path) from None
def fs_hashfile(self, path, algo, chunk_size=256):
try:
self.exec("import hashlib\nh = hashlib.{algo}()".format(algo=algo))
except TransportExecError:
# hashlib (or hashlib.{algo}) not available on device. Do the hash locally.
data = self.fs_readfile(path, chunk_size=chunk_size)
return getattr(hashlib, algo)(data).digest()
try:
self.exec(
"buf = memoryview(bytearray({chunk_size}))\nwith open('{path}', 'rb') as f:\n while True:\n n = f.readinto(buf)\n if n == 0:\n break\n h.update(buf if n == {chunk_size} else buf[:n])\n".format(
chunk_size=chunk_size, path=path
)
)
return self.eval("h.digest()")
except TransportExecError as e:
raise _convert_filesystem_error(e, path) from None
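fs_listdir works by exec'ing an os.ilistdir loop on the device, collecting the repr of each tuple, and rebuilding the result with ast.literal_eval. A small host-side sketch of that parsing step, with made-up captured output standing in for what a device would print:

import ast
from collections import namedtuple

listdir_result = namedtuple("dir_result", ["name", "st_mode", "st_ino", "st_size"])
# Hypothetical bytes accumulated by repr_consumer for one file and one directory.
captured = b"('main.py', 32768, 0, 120),('lib', 16384, 0, 0),"
entries = [listdir_result(*f) for f in ast.literal_eval((b"[" + captured + b"]").decode())]
assert entries[0].name == "main.py"
assert entries[1].st_mode & 0x4000  # 0x4000 marks a directory, as fs_isdir checks above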

File diff suppressed because it is too large

View File

@@ -0,0 +1,90 @@
"""
Control of WS2812 / NeoPixel LEDs.
MicroPython module: https://docs.micropython.org/en/v1.26.0/library/neopixel.html
This module provides a driver for WS2812 / NeoPixel LEDs.
``Note:`` This module is only included by default on the ESP8266, ESP32 and RP2
ports. On STM32 / Pyboard and others, you can either install the
``neopixel`` package using :term:`mip`, or you can download the module
directly from :term:`micropython-lib` and copy it to the filesystem.
"""
from __future__ import annotations
from _typeshed import Incomplete
from _mpy_shed import _NeoPixelBase
from machine import Pin
from typing import Tuple
from typing_extensions import Awaitable, TypeAlias, TypeVar
_Color: TypeAlias = tuple[int, int, int] | tuple[int, int, int, int]
class NeoPixel(_NeoPixelBase):
"""
This class stores pixel data for a WS2812 LED strip connected to a pin. The
application should set pixel data and then call :meth:`NeoPixel.write`
when it is ready to update the strip.
For example::
import neopixel
# 32 LED strip connected to X8.
p = machine.Pin.board.X8
n = neopixel.NeoPixel(p, 32)
# Draw a red gradient.
for i in range(32):
n[i] = (i * 8, 0, 0)
# Update the strip.
n.write()
"""
ORDER: Incomplete
pin: Incomplete
n: Incomplete
bpp: Incomplete
buf: Incomplete
timing: Incomplete
def __init__(self, pin, n, bpp: int = 3, timing: int = 1) -> None:
"""
Construct a NeoPixel object. The parameters are:
- *pin* is a machine.Pin instance.
- *n* is the number of LEDs in the strip.
- *bpp* is 3 for RGB LEDs, and 4 for RGBW LEDs.
- *timing* is 0 for 400kHz, and 1 for 800kHz LEDs (most are 800kHz).
"""
def __len__(self) -> int:
"""
Returns the number of LEDs in the strip.
"""
...
def __setitem__(self, i, v) -> None:
"""
Set the pixel at *index* to the value, which is an RGB/RGBW tuple.
"""
...
def __getitem__(self, i) -> Tuple:
"""
Returns the pixel at *index* as an RGB/RGBW tuple.
"""
...
def fill(self, v) -> None:
"""
Sets the value of all pixels to the specified *pixel* value (i.e. an
RGB/RGBW tuple).
"""
...
def write(self) -> None:
"""
Writes the current pixel data to the strip.
"""
...

View File

@@ -0,0 +1,21 @@
from _typeshed import Incomplete
class OneWireError(Exception): ...
class OneWire:
SEARCH_ROM: int
MATCH_ROM: int
SKIP_ROM: int
pin: Incomplete
def __init__(self, pin) -> None: ...
def reset(self, required: bool = False): ...
def readbit(self): ...
def readbyte(self): ...
def readinto(self, buf) -> None: ...
def writebit(self, value): ...
def writebyte(self, value): ...
def write(self, buf) -> None: ...
def select_rom(self, rom) -> None: ...
def scan(self): ...
def _search_rom(self, l_rom, diff): ...
def crc8(self, data): ...

View File

@@ -0,0 +1,760 @@
@Switch01
A_Rog
Aakanksha Agrawal
Abhinav Sagar
ABHYUDAY PRATAP SINGH
abs51295
AceGentile
Adam Chainz
Adam Tse
Adam Wentz
admin
Adrien Morison
ahayrapetyan
Ahilya
AinsworthK
Akash Srivastava
Alan Yee
Albert Tugushev
Albert-Guan
albertg
Alberto Sottile
Aleks Bunin
Ales Erjavec
Alethea Flowers
Alex Gaynor
Alex Grönholm
Alex Hedges
Alex Loosley
Alex Morega
Alex Stachowiak
Alexander Shtyrov
Alexandre Conrad
Alexey Popravka
Aleš Erjavec
Alli
Ami Fischman
Ananya Maiti
Anatoly Techtonik
Anders Kaseorg
Andre Aguiar
Andreas Lutro
Andrei Geacar
Andrew Gaul
Andrew Shymanel
Andrey Bienkowski
Andrey Bulgakov
Andrés Delfino
Andy Freeland
Andy Kluger
Ani Hayrapetyan
Aniruddha Basak
Anish Tambe
Anrs Hu
Anthony Sottile
Antoine Musso
Anton Ovchinnikov
Anton Patrushev
Antonio Alvarado Hernandez
Antony Lee
Antti Kaihola
Anubhav Patel
Anudit Nagar
Anuj Godase
AQNOUCH Mohammed
AraHaan
Arindam Choudhury
Armin Ronacher
Artem
Arun Babu Neelicattu
Ashley Manton
Ashwin Ramaswami
atse
Atsushi Odagiri
Avinash Karhana
Avner Cohen
Awit (Ah-Wit) Ghirmai
Baptiste Mispelon
Barney Gale
barneygale
Bartek Ogryczak
Bastian Venthur
Ben Bodenmiller
Ben Darnell
Ben Hoyt
Ben Mares
Ben Rosser
Bence Nagy
Benjamin Peterson
Benjamin VanEvery
Benoit Pierre
Berker Peksag
Bernard
Bernard Tyers
Bernardo B. Marques
Bernhard M. Wiedemann
Bertil Hatt
Bhavam Vidyarthi
Blazej Michalik
Bogdan Opanchuk
BorisZZZ
Brad Erickson
Bradley Ayers
Brandon L. Reiss
Brandt Bucher
Brett Randall
Brett Rosen
Brian Cristante
Brian Rosner
briantracy
BrownTruck
Bruno Oliveira
Bruno Renié
Bruno S
Bstrdsmkr
Buck Golemon
burrows
Bussonnier Matthias
bwoodsend
c22
Caleb Martinez
Calvin Smith
Carl Meyer
Carlos Liam
Carol Willing
Carter Thayer
Cass
Chandrasekhar Atina
Chih-Hsuan Yen
Chris Brinker
Chris Hunt
Chris Jerdonek
Chris Kuehl
Chris McDonough
Chris Pawley
Chris Pryer
Chris Wolfe
Christian Clauss
Christian Heimes
Christian Oudard
Christoph Reiter
Christopher Hunt
Christopher Snyder
cjc7373
Clark Boylan
Claudio Jolowicz
Clay McClure
Cody
Cody Soyland
Colin Watson
Collin Anderson
Connor Osborn
Cooper Lees
Cooper Ry Lees
Cory Benfield
Cory Wright
Craig Kerstiens
Cristian Sorinel
Cristina
Cristina Muñoz
Curtis Doty
cytolentino
Daan De Meyer
Dale
Damian
Damian Quiroga
Damian Shaw
Dan Black
Dan Savilonis
Dan Sully
Dane Hillard
daniel
Daniel Collins
Daniel Hahler
Daniel Holth
Daniel Jost
Daniel Katz
Daniel Shaulov
Daniele Esposti
Daniele Nicolodi
Daniele Procida
Daniil Konovalenko
Danny Hermes
Danny McClanahan
Darren Kavanagh
Dav Clark
Dave Abrahams
Dave Jones
David Aguilar
David Black
David Bordeynik
David Caro
David D Lowe
David Evans
David Hewitt
David Linke
David Poggi
David Pursehouse
David Runge
David Tucker
David Wales
Davidovich
ddelange
Deepak Sharma
Deepyaman Datta
Denise Yu
dependabot[bot]
derwolfe
Desetude
Devesh Kumar Singh
Diego Caraballo
Diego Ramirez
DiegoCaraballo
Dimitri Merejkowsky
Dimitri Papadopoulos
Dirk Stolle
Dmitry Gladkov
Dmitry Volodin
Domen Kožar
Dominic Davis-Foster
Donald Stufft
Dongweiming
doron zarhi
Dos Moonen
Douglas Thor
DrFeathers
Dustin Ingram
Dwayne Bailey
Ed Morley
Edgar Ramírez
Edgar Ramírez Mondragón
Ee Durbin
Efflam Lemaillet
efflamlemaillet
Eitan Adler
ekristina
elainechan
Eli Schwartz
Elisha Hollander
Ellen Marie Dash
Emil Burzo
Emil Styrke
Emmanuel Arias
Endoh Takanao
enoch
Erdinc Mutlu
Eric Cousineau
Eric Gillingham
Eric Hanchrow
Eric Hopper
Erik M. Bray
Erik Rose
Erwin Janssen
Eugene Vereshchagin
everdimension
Federico
Felipe Peter
Felix Yan
fiber-space
Filip Kokosiński
Filipe Laíns
Finn Womack
finnagin
Flavio Amurrio
Florian Briand
Florian Rathgeber
Francesco
Francesco Montesano
Frost Ming
Gabriel Curio
Gabriel de Perthuis
Garry Polley
gavin
gdanielson
Geoffrey Sneddon
George Song
Georgi Valkov
Georgy Pchelkin
ghost
Giftlin Rajaiah
gizmoguy1
gkdoc
Godefroid Chapelle
Gopinath M
GOTO Hayato
gousaiyang
gpiks
Greg Roodt
Greg Ward
Guilherme Espada
Guillaume Seguin
gutsytechster
Guy Rozendorn
Guy Tuval
gzpan123
Hanjun Kim
Hari Charan
Harsh Vardhan
harupy
Harutaka Kawamura
hauntsaninja
Henrich Hartzer
Henry Schreiner
Herbert Pfennig
Holly Stotelmyer
Honnix
Hsiaoming Yang
Hugo Lopes Tavares
Hugo van Kemenade
Hugues Bruant
Hynek Schlawack
Ian Bicking
Ian Cordasco
Ian Lee
Ian Stapleton Cordasco
Ian Wienand
Igor Kuzmitshov
Igor Sobreira
Ilan Schnell
Illia Volochii
Ilya Baryshev
Inada Naoki
Ionel Cristian Mărieș
Ionel Maries Cristian
Itamar Turner-Trauring
Ivan Pozdeev
J. Nick Koston
Jacob Kim
Jacob Walls
Jaime Sanz
jakirkham
Jakub Kuczys
Jakub Stasiak
Jakub Vysoky
Jakub Wilk
James Cleveland
James Curtin
James Firth
James Gerity
James Polley
Jan Pokorný
Jannis Leidel
Jarek Potiuk
jarondl
Jason Curtis
Jason R. Coombs
JasonMo
JasonMo1
Jay Graves
Jean Abou Samra
Jean-Christophe Fillion-Robin
Jeff Barber
Jeff Dairiki
Jeff Widman
Jelmer Vernooij
jenix21
Jeremy Stanley
Jeremy Zafran
Jesse Rittner
Jiashuo Li
Jim Fisher
Jim Garrison
Jiun Bae
Jivan Amara
Joe Bylund
Joe Michelini
John Paton
John T. Wodder II
John-Scott Atlakson
johnthagen
Jon Banafato
Jon Dufresne
Jon Parise
Jonas Nockert
Jonathan Herbert
Joonatan Partanen
Joost Molenaar
Jorge Niedbalski
Joseph Bylund
Joseph Long
Josh Bronson
Josh Hansen
Josh Schneier
Joshua
Juan Luis Cano Rodríguez
Juanjo Bazán
Judah Rand
Julian Berman
Julian Gethmann
Julien Demoor
Jussi Kukkonen
jwg4
Jyrki Pulliainen
Kai Chen
Kai Mueller
Kamal Bin Mustafa
kasium
kaustav haldar
keanemind
Keith Maxwell
Kelsey Hightower
Kenneth Belitzky
Kenneth Reitz
Kevin Burke
Kevin Carter
Kevin Frommelt
Kevin R Patterson
Kexuan Sun
Kit Randel
Klaas van Schelven
KOLANICH
kpinc
Krishna Oza
Kumar McMillan
Kurt McKee
Kyle Persohn
lakshmanaram
Laszlo Kiss-Kollar
Laurent Bristiel
Laurent LAPORTE
Laurie O
Laurie Opperman
layday
Leon Sasson
Lev Givon
Lincoln de Sousa
Lipis
lorddavidiii
Loren Carvalho
Lucas Cimon
Ludovic Gasc
Lukas Geiger
Lukas Juhrich
Luke Macken
Luo Jiebin
luojiebin
luz.paz
László Kiss Kollár
M00nL1ght
Marc Abramowitz
Marc Tamlyn
Marcus Smith
Mariatta
Mark Kohler
Mark Williams
Markus Hametner
Martey Dodoo
Martin Fischer
Martin Häcker
Martin Pavlasek
Masaki
Masklinn
Matej Stuchlik
Mathew Jennings
Mathieu Bridon
Mathieu Kniewallner
Matt Bacchi
Matt Good
Matt Maker
Matt Robenolt
matthew
Matthew Einhorn
Matthew Feickert
Matthew Gilliard
Matthew Iversen
Matthew Treinish
Matthew Trumbell
Matthew Willson
Matthias Bussonnier
mattip
Maurits van Rees
Max W Chase
Maxim Kurnikov
Maxime Rouyrre
mayeut
mbaluna
mdebi
memoselyk
meowmeowcat
Michael
Michael Aquilina
Michael E. Karpeles
Michael Klich
Michael Mintz
Michael Williamson
michaelpacer
Michał Górny
Mickaël Schoentgen
Miguel Araujo Perez
Mihir Singh
Mike
Mike Hendricks
Min RK
MinRK
Miro Hrončok
Monica Baluna
montefra
Monty Taylor
Muha Ajjan
Nadav Wexler
Nahuel Ambrosini
Nate Coraor
Nate Prewitt
Nathan Houghton
Nathaniel J. Smith
Nehal J Wani
Neil Botelho
Nguyễn Gia Phong
Nicholas Serra
Nick Coghlan
Nick Stenning
Nick Timkovich
Nicolas Bock
Nicole Harris
Nikhil Benesch
Nikhil Ladha
Nikita Chepanov
Nikolay Korolev
Nipunn Koorapati
Nitesh Sharma
Niyas Sait
Noah
Noah Gorny
Nowell Strite
NtaleGrey
nvdv
OBITORASU
Ofek Lev
ofrinevo
Oliver Freund
Oliver Jeeves
Oliver Mannion
Oliver Tonnhofer
Olivier Girardot
Olivier Grisel
Ollie Rutherfurd
OMOTO Kenji
Omry Yadan
onlinejudge95
Oren Held
Oscar Benjamin
Oz N Tiram
Pachwenko
Patrick Dubroy
Patrick Jenkins
Patrick Lawson
patricktokeeffe
Patrik Kopkan
Paul Ganssle
Paul Kehrer
Paul Moore
Paul Nasrat
Paul Oswald
Paul van der Linden
Paulus Schoutsen
Pavel Safronov
Pavithra Eswaramoorthy
Pawel Jasinski
Paweł Szramowski
Pekka Klärck
Peter Gessler
Peter Lisák
Peter Waller
petr-tik
Phaneendra Chiruvella
Phil Elson
Phil Freo
Phil Pennock
Phil Whelan
Philip Jägenstedt
Philip Molloy
Philippe Ombredanne
Pi Delport
Pierre-Yves Rofes
Pieter Degroote
pip
Prabakaran Kumaresshan
Prabhjyotsing Surjit Singh Sodhi
Prabhu Marappan
Pradyun Gedam
Prashant Sharma
Pratik Mallya
pre-commit-ci[bot]
Preet Thakkar
Preston Holmes
Przemek Wrzos
Pulkit Goyal
q0w
Qiangning Hong
Qiming Xu
Quentin Lee
Quentin Pradet
R. David Murray
Rafael Caricio
Ralf Schmitt
Razzi Abuissa
rdb
Reece Dunham
Remi Rampin
Rene Dudfield
Riccardo Magliocchetti
Riccardo Schirone
Richard Jones
Richard Si
Ricky Ng-Adam
Rishi
RobberPhex
Robert Collins
Robert McGibbon
Robert Pollak
Robert T. McGibbon
robin elisha robinson
Roey Berman
Rohan Jain
Roman Bogorodskiy
Roman Donchenko
Romuald Brunet
ronaudinho
Ronny Pfannschmidt
Rory McCann
Ross Brattain
Roy Wellington Ⅳ
Ruairidh MacLeod
Russell Keith-Magee
Ryan Shepherd
Ryan Wooden
ryneeverett
Sachi King
Salvatore Rinchiera
sandeepkiran-js
Sander Van Balen
Savio Jomton
schlamar
Scott Kitterman
Sean
seanj
Sebastian Jordan
Sebastian Schaetz
Segev Finer
SeongSoo Cho
Sergey Vasilyev
Seth Michael Larson
Seth Woodworth
Shahar Epstein
Shantanu
shireenrao
Shivansh-007
Shlomi Fish
Shovan Maity
Simeon Visser
Simon Cross
Simon Pichugin
sinoroc
sinscary
snook92
socketubs
Sorin Sbarnea
Srinivas Nyayapati
Stavros Korokithakis
Stefan Scherfke
Stefano Rivera
Stephan Erb
Stephen Rosen
stepshal
Steve (Gadget) Barnes
Steve Barnes
Steve Dower
Steve Kowalik
Steven Myint
Steven Silvester
stonebig
studioj
Stéphane Bidoul
Stéphane Bidoul (ACSONE)
Stéphane Klein
Sumana Harihareswara
Surbhi Sharma
Sviatoslav Sydorenko
Swat009
Sylvain
Takayuki SHIMIZUKAWA
Taneli Hukkinen
tbeswick
Thiago
Thijs Triemstra
Thomas Fenzl
Thomas Grainger
Thomas Guettler
Thomas Johansson
Thomas Kluyver
Thomas Smith
Thomas VINCENT
Tim D. Smith
Tim Gates
Tim Harder
Tim Heap
tim smith
tinruufu
Tobias Hermann
Tom Forbes
Tom Freudenheim
Tom V
Tomas Hrnciar
Tomas Orsava
Tomer Chachamu
Tommi Enenkel | AnB
Tomáš Hrnčiar
Tony Beswick
Tony Narlock
Tony Zhaocheng Tan
TonyBeswick
toonarmycaptain
Toshio Kuratomi
toxinu
Travis Swicegood
Tushar Sadhwani
Tzu-ping Chung
Valentin Haenel
Victor Stinner
victorvpaulo
Vikram - Google
Viktor Szépe
Ville Skyttä
Vinay Sajip
Vincent Philippon
Vinicyus Macedo
Vipul Kumar
Vitaly Babiy
Vladimir Fokow
Vladimir Rutsky
W. Trevor King
Wil Tan
Wilfred Hughes
William Edwards
William ML Leslie
William T Olson
William Woodruff
Wilson Mo
wim glenn
Winson Luk
Wolfgang Maier
Wu Zhenyu
XAMES3
Xavier Fernandez
xoviat
xtreak
YAMAMOTO Takashi
Yen Chi Hsuan
Yeray Diaz Diaz
Yoval P
Yu Jian
Yuan Jing Vincent Yan
Yusuke Hayashi
Zearin
Zhiping Deng
ziebam
Zvezdan Petkovic
Łukasz Langa
Роман Донченко
Семён Марьясин
rekcäH nitraM


@@ -0,0 +1 @@
pip


@@ -0,0 +1,20 @@
Copyright (c) 2008-present The pip developers (see AUTHORS.txt file)
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,88 @@
Metadata-Version: 2.1
Name: pip
Version: 24.0
Summary: The PyPA recommended tool for installing Python packages.
Author-email: The pip developers <distutils-sig@python.org>
License: MIT
Project-URL: Homepage, https://pip.pypa.io/
Project-URL: Documentation, https://pip.pypa.io
Project-URL: Source, https://github.com/pypa/pip
Project-URL: Changelog, https://pip.pypa.io/en/stable/news/
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.7
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
License-File: AUTHORS.txt
pip - The Python Package Installer
==================================
.. image:: https://img.shields.io/pypi/v/pip.svg
   :target: https://pypi.org/project/pip/
   :alt: PyPI

.. image:: https://img.shields.io/pypi/pyversions/pip
   :target: https://pypi.org/project/pip
   :alt: PyPI - Python Version

.. image:: https://readthedocs.org/projects/pip/badge/?version=latest
   :target: https://pip.pypa.io/en/latest
   :alt: Documentation
pip is the `package installer`_ for Python. You can use pip to install packages from the `Python Package Index`_ and other indexes.
Please take a look at our documentation for how to install and use pip:
* `Installation`_
* `Usage`_
We release updates regularly, with a new version every 3 months. Find more details in our documentation:
* `Release notes`_
* `Release process`_
If you find bugs, need help, or want to talk to the developers, please use our mailing lists or chat rooms:
* `Issue tracking`_
* `Discourse channel`_
* `User IRC`_
If you want to get involved head over to GitHub to get the source code, look at our development documentation and feel free to jump on the developer mailing lists and chat rooms:
* `GitHub page`_
* `Development documentation`_
* `Development IRC`_
Code of Conduct
---------------
Everyone interacting in the pip project's codebases, issue trackers, chat
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
.. _package installer: https://packaging.python.org/guides/tool-recommendations/
.. _Python Package Index: https://pypi.org
.. _Installation: https://pip.pypa.io/en/stable/installation/
.. _Usage: https://pip.pypa.io/en/stable/
.. _Release notes: https://pip.pypa.io/en/stable/news.html
.. _Release process: https://pip.pypa.io/en/latest/development/release-process/
.. _GitHub page: https://github.com/pypa/pip
.. _Development documentation: https://pip.pypa.io/en/latest/development
.. _Issue tracking: https://github.com/pypa/pip/issues
.. _Discourse channel: https://discuss.python.org/c/packaging
.. _User IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa
.. _Development IRC: https://kiwiirc.com/nextclient/#ircs://irc.libera.chat:+6697/pypa-dev
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md

File diff suppressed because it is too large


@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.42.0)
Root-Is-Purelib: true
Tag: py3-none-any


@@ -0,0 +1,4 @@
[console_scripts]
pip = pip._internal.cli.main:main
pip3 = pip._internal.cli.main:main
pip3.12 = pip._internal.cli.main:main
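The entry points above are what produce the pip, pip3 and pip3.12 console scripts at install time. As a rough sketch (not the literal wrapper that installers generate), each generated script effectively does the following:

import sys

from pip._internal.cli.main import main  # target named in the entry point above

if __name__ == "__main__":
    sys.exit(main())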

Some files were not shown because too many files have changed in this diff