diff --git a/copy_libs.bat b/copy_libs.bat
new file mode 100644
index 000000000..fed38d6f9
--- /dev/null
+++ b/copy_libs.bat
@@ -0,0 +1,2 @@
+xcopy ..\pipkin\pipkin\*.py thonny\vendored_libs\pipkin /s /e
+@rem pip install filelock -t thonny\vendored_libs
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 52221c26b..7aa871dac 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,8 @@ def recursive_files(directory):
paths = []
for (path, _, filenames) in os.walk(directory):
for filename in filenames:
- paths.append(os.path.join('..', path, filename))
+ if not filename.endswith(".pyc"):
+ paths.append(os.path.join('..', path, filename))
return paths
if sys.version_info < (3, 6):
@@ -69,7 +70,9 @@ def recursive_files(directory):
python_requires=">=3.8",
packages=find_packages(),
package_data={
- "": ["VERSION", "defaults.ini", "res/*"] + recursive_files("thonny/locale"),
+ "": ["VERSION", "defaults.ini", "res/*"]
+ + recursive_files("thonny/locale")
+ + recursive_files("thonny/vendored_libs"),
"thonny.plugins.help": ["*.rst"],
"thonny.plugins.pi": ["res/*.*"],
"thonny.plugins.printing": ["*.html"],
diff --git a/thonny/plugins/cpython_backend/cp_tracers.py b/thonny/plugins/cpython_backend/cp_tracers.py
index ab6ce1325..64b29ef39 100644
--- a/thonny/plugins/cpython_backend/cp_tracers.py
+++ b/thonny/plugins/cpython_backend/cp_tracers.py
@@ -1,3 +1,4 @@
+import _ast
import ast
import builtins
import dis
@@ -9,8 +10,6 @@
from importlib.machinery import PathFinder, SourceFileLoader
from logging import getLogger
-import _ast
-
from thonny.common import (
DebuggerCommand,
DebuggerResponse,
diff --git a/thonny/plugins/micropython/mp_back.py b/thonny/plugins/micropython/mp_back.py
index f6be5d6a3..a0ca48225 100644
--- a/thonny/plugins/micropython/mp_back.py
+++ b/thonny/plugins/micropython/mp_back.py
@@ -114,6 +114,11 @@
class MicroPythonBackend(MainBackend, ABC):
def __init__(self, clean, args):
+ # Make pipkin available
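+        # (i.e. put <thonny_package_dir>/vendored_libs at the front of sys.path,
+        # so that "import pipkin" resolves to the vendored copy)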
+ sys.path.insert(
+ 0,
+ os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..", "vendored_libs")),
+ )
logger.info("Initializing MicroPythonBackend of type %s", type(self).__name__)
self._connection: MicroPythonConnection
self._args = args
diff --git a/thonny/vendored_libs/filelock/__init__.py b/thonny/vendored_libs/filelock/__init__.py
new file mode 100644
index 000000000..afcdb706d
--- /dev/null
+++ b/thonny/vendored_libs/filelock/__init__.py
@@ -0,0 +1,48 @@
+"""
+A platform independent file lock that supports the with-statement.
+
+.. autodata:: filelock.__version__
+ :no-value:
+
+"""
+from __future__ import annotations
+
+import sys
+import warnings
+
+from ._api import AcquireReturnProxy, BaseFileLock
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock, has_fcntl
+from ._windows import WindowsFileLock
+from .version import version
+
+#: version of the project as a string
+__version__: str = version
+
+
+if sys.platform == "win32": # pragma: win32 cover
+ _FileLock: type[BaseFileLock] = WindowsFileLock
+else: # pragma: win32 no cover
+ if has_fcntl:
+ _FileLock: type[BaseFileLock] = UnixFileLock
+ else:
+ _FileLock = SoftFileLock
+ if warnings is not None:
+ warnings.warn("only soft file lock is available")
+
+#: Alias for the lock, which should be used for the current platform. On Windows, this is an alias for
+# :class:`WindowsFileLock`, on Unix for :class:`UnixFileLock` and otherwise for :class:`SoftFileLock`.
+FileLock: type[BaseFileLock] = _FileLock
+
+
+__all__ = [
+ "__version__",
+ "FileLock",
+ "SoftFileLock",
+ "Timeout",
+ "UnixFileLock",
+ "WindowsFileLock",
+ "BaseFileLock",
+ "AcquireReturnProxy",
+]
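+
+
+# Usage sketch (illustrative, not part of the vendored module):
+#
+#   from filelock import FileLock, Timeout
+#
+#   lock = FileLock("app.lock")
+#   try:
+#       with lock.acquire(timeout=10):
+#           ...  # exclusive section
+#   except Timeout:
+#       print("another process is holding app.lock")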
diff --git a/thonny/vendored_libs/filelock/_api.py b/thonny/vendored_libs/filelock/_api.py
new file mode 100644
index 000000000..282106fdd
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_api.py
@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+import contextlib
+import logging
+import os
+import time
+import warnings
+from abc import ABC, abstractmethod
+from threading import Lock
+from types import TracebackType
+from typing import Any
+
+from ._error import Timeout
+
+_LOGGER = logging.getLogger("filelock")
+
+
+# This is a helper class which is returned by :meth:`BaseFileLock.acquire` and wraps the lock to make sure __enter__
+# is not called twice when entering the with statement. If we simply returned *self*, the lock would be acquired
+# again in the *__enter__* method of the BaseFileLock, but never released automatically. See issue #37 (memory leak).
+class AcquireReturnProxy:
+ """A context aware object that will release the lock file when exiting."""
+
+ def __init__(self, lock: BaseFileLock) -> None:
+ self.lock = lock
+
+ def __enter__(self) -> BaseFileLock:
+ return self.lock
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None, # noqa: U100
+ exc_value: BaseException | None, # noqa: U100
+ traceback: TracebackType | None, # noqa: U100
+ ) -> None:
+ self.lock.release()
+
+
+class BaseFileLock(ABC, contextlib.ContextDecorator):
+ """Abstract base class for a file lock object."""
+
+ def __init__(self, lock_file: str | os.PathLike[Any], timeout: float = -1) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+        :param timeout: default timeout when acquiring the lock. It is used as the fallback value in the acquire
+            method if no timeout value (``None``) is given there. To disable the timeout, set it to a negative value.
+            A timeout of 0 means there is exactly one attempt to acquire the file lock.
+ """
+ # The path to the lock file.
+ self._lock_file: str = os.fspath(lock_file)
+
+ # The file descriptor for the *_lock_file* as it is returned by the os.open() function.
+        # It is not None only while the object currently holds the lock.
+ self._lock_file_fd: int | None = None
+
+ # The default timeout value.
+ self.timeout: float = timeout
+
+ # We use this lock primarily for the lock counter.
+ self._thread_lock: Lock = Lock()
+
+ # The lock counter is used for implementing the nested locking mechanism. Whenever the lock is acquired, the
+ # counter is increased and the lock is only released, when this value is 0 again.
+ self._lock_counter: int = 0
+
+ @property
+ def lock_file(self) -> str:
+ """:return: path to the lock file"""
+ return self._lock_file
+
+ @property
+ def timeout(self) -> float:
+ """
+ :return: the default timeout value
+
+ .. versionadded:: 2.0.0
+ """
+ return self._timeout
+
+ @timeout.setter
+ def timeout(self, value: float | str) -> None:
+ """
+ Change the default timeout value.
+
+ :param value: the new value
+ """
+ self._timeout = float(value)
+
+ @abstractmethod
+ def _acquire(self) -> None:
+ """If the file lock could be acquired, self._lock_file_fd holds the file descriptor of the lock file."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def _release(self) -> None:
+ """Releases the lock and sets self._lock_file_fd to None."""
+ raise NotImplementedError
+
+ @property
+ def is_locked(self) -> bool:
+ """
+
+        :return: A boolean indicating whether this lock is currently held.
+
+ .. versionchanged:: 2.0.0
+
+ This was previously a method and is now a property.
+ """
+ return self._lock_file_fd is not None
+
+ def acquire(
+ self,
+ timeout: float | None = None,
+ poll_interval: float = 0.05,
+ *,
+ poll_intervall: float | None = None,
+ ) -> AcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+        :param timeout: maximum wait time for acquiring the lock. ``None`` means use the default :attr:`~timeout`;
+            if ``timeout < 0``, there is no timeout and this method blocks until the lock is acquired
+ :param poll_interval: interval of trying to acquire the lock file
+ :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
+        :raises Timeout: if the lock could not be acquired within the timeout period
+ :return: a context object that will unlock the file when the context is exited
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ .. versionchanged:: 2.0.0
+
+            This method now returns a *proxy* object instead of *self*,
+ so that it can be used in a with statement without side effects.
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self.timeout
+
+ if poll_intervall is not None:
+ msg = "use poll_interval instead of poll_intervall"
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ poll_interval = poll_intervall
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ with self._thread_lock:
+ self._lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self._lock_file
+ start_time = time.monotonic()
+ try:
+ while True:
+ with self._thread_lock:
+ if not self.is_locked:
+ _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+ self._acquire()
+
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+ break
+ elif 0 <= timeout < time.monotonic() - start_time:
+ _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
+ raise Timeout(self._lock_file)
+ else:
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ time.sleep(poll_interval)
+ except BaseException: # Something did go wrong, so decrement the counter.
+ with self._thread_lock:
+ self._lock_counter = max(0, self._lock_counter - 1)
+ raise
+ return AcquireReturnProxy(lock=self)
+
+ def release(self, force: bool = False) -> None:
+ """
+        Releases the file lock. Note that the lock is only completely released if the lock counter is 0. Also
+        note that the lock file itself is not automatically deleted.
+
+        :param force: If true, the lock counter is ignored and the lock is released in any case.
+ """
+ with self._thread_lock:
+
+ if self.is_locked:
+ self._lock_counter -= 1
+
+ if self._lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self._lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+ self._release()
+ self._lock_counter = 0
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ def __enter__(self) -> BaseFileLock:
+ """
+ Acquire the lock.
+
+ :return: the lock object
+ """
+ self.acquire()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None, # noqa: U100
+ exc_value: BaseException | None, # noqa: U100
+ traceback: TracebackType | None, # noqa: U100
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+ """
+ self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ self.release(force=True)
+
+
+__all__ = [
+ "BaseFileLock",
+ "AcquireReturnProxy",
+]
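+
+
+# Because BaseFileLock derives from contextlib.ContextDecorator, a concrete
+# lock (e.g. filelock.FileLock) can also guard a whole function. Illustrative
+# sketch:
+#
+#   lock = FileLock("app.lock")
+#
+#   @lock
+#   def critical_section() -> None:
+#       ...  # runs with the lock held; released on return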
diff --git a/thonny/vendored_libs/filelock/_error.py b/thonny/vendored_libs/filelock/_error.py
new file mode 100644
index 000000000..b3885214d
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_error.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+
+class Timeout(TimeoutError):
+ """Raised when the lock could not be acquired in *timeout* seconds."""
+
+ def __init__(self, lock_file: str) -> None:
+ #: The path of the file lock.
+ self.lock_file = lock_file
+
+ def __str__(self) -> str:
+ return f"The file lock '{self.lock_file}' could not be acquired."
+
+
+__all__ = [
+ "Timeout",
+]
diff --git a/thonny/vendored_libs/filelock/_soft.py b/thonny/vendored_libs/filelock/_soft.py
new file mode 100644
index 000000000..cb09799ad
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_soft.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import os
+import sys
+from errno import EACCES, EEXIST, ENOENT
+
+from ._api import BaseFileLock
+from ._util import raise_on_exist_ro_file
+
+
+class SoftFileLock(BaseFileLock):
+ """Simply watches the existence of the lock file."""
+
+ def _acquire(self) -> None:
+ raise_on_exist_ro_file(self._lock_file)
+ # first check for exists and read-only mode as the open will mask this case as EEXIST
+ mode = (
+ os.O_WRONLY # open for writing only
+ | os.O_CREAT
+ | os.O_EXCL # together with above raise EEXIST if the file specified by filename exists
+ | os.O_TRUNC # truncate the file to zero byte
+ )
+ try:
+ fd = os.open(self._lock_file, mode)
+ except OSError as exception:
+ if exception.errno == EEXIST: # expected if cannot lock
+ pass
+ elif exception.errno == ENOENT: # No such file or directory - parent directory is missing
+ raise
+ elif exception.errno == EACCES and sys.platform != "win32": # pragma: win32 no cover
+ # Permission denied - parent dir is R/O
+ raise # note windows does not allow you to make a folder r/o only files
+ else:
+ self._lock_file_fd = fd
+
+ def _release(self) -> None:
+ os.close(self._lock_file_fd) # type: ignore # the lock file is definitely not None
+ self._lock_file_fd = None
+ try:
+ os.remove(self._lock_file)
+ except OSError: # the file is already deleted and that's what we want
+ pass
+
+
+__all__ = [
+ "SoftFileLock",
+]
diff --git a/thonny/vendored_libs/filelock/_unix.py b/thonny/vendored_libs/filelock/_unix.py
new file mode 100644
index 000000000..03b612c92
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_unix.py
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import os
+import sys
+from typing import cast
+
+from ._api import BaseFileLock
+
+#: a flag to indicate if the fcntl API is available
+has_fcntl = False
+if sys.platform == "win32": # pragma: win32 cover
+
+ class UnixFileLock(BaseFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+else: # pragma: win32 no cover
+ try:
+ import fcntl
+ except ImportError:
+ pass
+ else:
+ has_fcntl = True
+
+ class UnixFileLock(BaseFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+ def _acquire(self) -> None:
+ open_mode = os.O_RDWR | os.O_CREAT | os.O_TRUNC
+ fd = os.open(self._lock_file, open_mode)
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except OSError:
+ os.close(fd)
+ else:
+ self._lock_file_fd = fd
+
+ def _release(self) -> None:
+ # Do not remove the lockfile:
+ # https://github.com/tox-dev/py-filelock/issues/31
+ # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
+ fd = cast(int, self._lock_file_fd)
+ self._lock_file_fd = None
+ fcntl.flock(fd, fcntl.LOCK_UN)
+ os.close(fd)
+
+
+__all__ = [
+ "has_fcntl",
+ "UnixFileLock",
+]
diff --git a/thonny/vendored_libs/filelock/_util.py b/thonny/vendored_libs/filelock/_util.py
new file mode 100644
index 000000000..238b80fa1
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_util.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+import os
+import stat
+
+
+def raise_on_exist_ro_file(filename: str) -> None:
+ try:
+ file_stat = os.stat(filename) # use stat to do exists + can write to check without race condition
+ except OSError:
+ return None # swallow does not exist or other errors
+
+    if file_stat.st_mtime != 0:  # an os.stat result with zero modification time is invalid - ignore it
+ if not (file_stat.st_mode & stat.S_IWUSR):
+ raise PermissionError(f"Permission denied: {filename!r}")
+
+
+__all__ = [
+ "raise_on_exist_ro_file",
+]
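+
+
+# Illustrative behaviour: for an existing lock file whose owner-write bit is
+# cleared (e.g. after "chmod 444 app.lock"), raise_on_exist_ro_file raises
+# PermissionError; a missing file (os.stat raising OSError) is silently ignored.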
diff --git a/thonny/vendored_libs/filelock/_windows.py b/thonny/vendored_libs/filelock/_windows.py
new file mode 100644
index 000000000..60e68cb9b
--- /dev/null
+++ b/thonny/vendored_libs/filelock/_windows.py
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+import os
+import sys
+from errno import ENOENT
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import raise_on_exist_ro_file
+
+if sys.platform == "win32": # pragma: win32 cover
+ import msvcrt
+
+ class WindowsFileLock(BaseFileLock):
+ """Uses the :func:`msvcrt.locking` function to hard lock the lock file on windows systems."""
+
+ def _acquire(self) -> None:
+ raise_on_exist_ro_file(self._lock_file)
+ mode = (
+ os.O_RDWR # open for read and write
+ | os.O_CREAT # create file if not exists
+ | os.O_TRUNC # truncate file if not empty
+ )
+ try:
+ fd = os.open(self._lock_file, mode)
+ except OSError as exception:
+ if exception.errno == ENOENT: # No such file or directory
+ raise
+ else:
+ try:
+ msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+ except OSError:
+ os.close(fd)
+ else:
+ self._lock_file_fd = fd
+
+ def _release(self) -> None:
+ fd = cast(int, self._lock_file_fd)
+ self._lock_file_fd = None
+ msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
+ os.close(fd)
+
+ try:
+ os.remove(self._lock_file)
+        # Probably another instance of the application has acquired the file lock.
+ except OSError:
+ pass
+
+else: # pragma: win32 no cover
+
+ class WindowsFileLock(BaseFileLock):
+ """Uses the :func:`msvcrt.locking` function to hard lock the lock file on windows systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+
+__all__ = [
+ "WindowsFileLock",
+]
diff --git a/thonny/vendored_libs/filelock/py.typed b/thonny/vendored_libs/filelock/py.typed
new file mode 100644
index 000000000..e69de29bb
diff --git a/thonny/vendored_libs/filelock/version.py b/thonny/vendored_libs/filelock/version.py
new file mode 100644
index 000000000..04265eefb
--- /dev/null
+++ b/thonny/vendored_libs/filelock/version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+version = '3.6.0'
+version_tuple = (3, 6, 0)
diff --git a/thonny/vendored_libs/pipkin/__init__.py b/thonny/vendored_libs/pipkin/__init__.py
new file mode 100644
index 000000000..cb399f65f
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/__init__.py
@@ -0,0 +1,67 @@
+import logging
+import subprocess
+import sys
+import traceback
+from typing import List, Optional
+
+from pipkin.adapters import create_adapter
+from pipkin.common import ManagementError, UserError
+from pipkin.session import Session
+
+logger = logging.getLogger("pipkin")
+
+__version__ = "0.2b1"
+
+
+def error(msg):
+ msg = "ERROR: " + msg
+ if sys.stderr.isatty():
+ print("\x1b[31m", msg, "\x1b[0m", sep="", file=sys.stderr)
+ else:
+ print(msg, file=sys.stderr)
+
+ return 1
+
+
+def main(raw_args: Optional[List[str]] = None) -> int:
+ from pipkin import parser
+
+ args = parser.parse_arguments(raw_args)
+
+ if args.verbose:
+ logging_level = logging.DEBUG
+ elif args.quiet:
+ logging_level = logging.ERROR
+ else:
+ logging_level = logging.INFO
+
+ logger.setLevel(logging_level)
+ logger.propagate = True
+ console_handler = logging.StreamHandler(sys.stderr)
+ console_handler.setLevel(logging_level)
+ logger.addHandler(console_handler)
+
+ args_dict = vars(args)
+
+ try:
+ adapter = create_adapter(**args_dict)
+ session = Session(adapter)
+ try:
+ command_handler = getattr(session, args.command)
+ command_handler(**args_dict)
+ finally:
+ session.close()
+ except KeyboardInterrupt:
+ return 1
+    except ManagementError as e:
+        logger.error(traceback.format_exc())
+        logger.error("SCRIPT: %r", e.script)
+        logger.error("OUT=%r", e.out)
+        logger.error("ERR=%r", e.err)
+        return 1
+ except UserError as e:
+ return error(str(e))
+ except subprocess.CalledProcessError:
+ # assuming the subprocess (pip) already printed the error
+ return 1
+
+ return 0
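+
+
+# Illustrative invocation (the flag and command names are assumptions based on
+# create_adapter and Session, not an authoritative CLI reference):
+#
+#   sys.exit(main(["--mount", "/media/user/CIRCUITPY", "list"]))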
diff --git a/thonny/vendored_libs/pipkin/__main__.py b/thonny/vendored_libs/pipkin/__main__.py
new file mode 100644
index 000000000..21265c8e8
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/__main__.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+"""
+MIT License
+
+Copyright (c) 2022 Aivar Annamaa
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+import logging
+import subprocess
+import sys
+import textwrap
+from typing import List, Optional
+
+from pipkin import main
+
+logger = logging.getLogger(__name__)
+
+MP_ORG_INDEX = "https://micropython.org/pi"
+PYPI_INDEX = "https://pypi.org/pypi"
+PYPI_SIMPLE_INDEX = "https://pypi.org/simple"
+DEFAULT_INDEX_URLS = [MP_ORG_INDEX, PYPI_INDEX]
+SERVER_ENCODING = "utf-8"
+
+__version__ = "0.2b1"
+
+"""
+steps:
+ - infer target if no explicit connection parameters are given
+ - connect (MP)
+ - determine target location on the device/mount
+ - sync RTC (MP, install, uninstall). Not required for CP?
+ - ensure temp venv for pip operations
+ - fetch METADATA-s and RECORD-s (may be empty in all cases except "show -f") and populate venv
+ - record current state
+ - invoke pip (translate paths in the output)
+ - determine deleted and changed dists and remove these on the target (according to actual RECORD-s)
+ - determine new and changed dists and copy these to the target
+ - clear venv
+"""
+
+
+sys.exit(main(sys.argv[1:]))
diff --git a/thonny/vendored_libs/pipkin/adapters.py b/thonny/vendored_libs/pipkin/adapters.py
new file mode 100644
index 000000000..55528cadf
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/adapters.py
@@ -0,0 +1,443 @@
+import os.path
+import re
+from abc import ABC, abstractmethod
+from logging import getLogger
+from typing import Dict, List, Optional, Tuple
+
+from pipkin.common import UserError
+from pipkin.util import parse_meta_dir_name
+
+META_ENCODING = "utf-8"
+
+logger = getLogger(__name__)
+
+
+class Adapter(ABC):
+ """
+ An instance of Adapter is meant to be used by single instance of Session.
+ It is assumed that during the lifetime of an Adapter, sys.path stays fixed and
+ distributions and sys.path directories are only manipulated via this Adapter.
+ This requirement is related to the caching used in BaseAdapter.
+ """
+
+ @abstractmethod
+ def get_user_packages_path(self) -> Optional[str]:
+ """Unix / Windows ports return the location of user packages"""
+ ...
+
+ @abstractmethod
+ def get_default_target(self) -> str:
+ """Installation location if neither --user nor --target is specified"""
+ ...
+
+ @abstractmethod
+    def list_dists(self, paths: Optional[List[str]] = None) -> Dict[str, Tuple[str, str]]:
+        """Return canonical names of the distributions mapped to their meta dir names and
+ installation paths.
+
+ If a distribution is installed to different sys.path locations, then return only the first one.
+ """
+ ...
+
+ @abstractmethod
+ def remove_dist(
+ self, dist_name: str, target: Optional[str] = None, above_target: bool = False
+ ) -> None:
+ """If target is given, then remove from this directory.
+        If above_target, then also remove from sys.path dirs which precede target and would hide the package there.
+ Otherwise remove the first visible instance of the dist according to sys.path.
+ """
+ ...
+
+ @abstractmethod
+ def read_file(self, path: str) -> bytes:
+ """Path must be device's absolute path (ie. start with /)"""
+ ...
+
+ @abstractmethod
+ def write_file(self, path: str, content: bytes) -> None:
+ """Path must be device's absolute path (ie. start with /)"""
+ ...
+
+ @abstractmethod
+ def join_path(self, *parts: str) -> str:
+ ...
+
+ @abstractmethod
+ def split_dir_and_basename(self, path: str) -> Tuple[str, str]:
+ ...
+
+ @abstractmethod
+ def normpath(self, path: str) -> str:
+ ...
+
+ @abstractmethod
+ def get_implementation_name_and_version_prefix(self) -> Tuple[str, str]:
+ ...
+
+ @abstractmethod
+ def get_mpy_cross_args(self) -> List[str]:
+ ...
+
+
+class BaseAdapter(Adapter, ABC):
+ def __init__(self):
+ self._ensured_directories = set()
+ self._sys_path: Optional[List[str]] = None
+ self._sys_implementation: Optional[Tuple[str, str, int]] = None
+
+ def get_sys_path(self) -> List[str]:
+ if self._sys_path is None:
+ self._sys_path = self.fetch_sys_path()
+ return self._sys_path
+
+ def get_implementation_name_and_version_prefix(self) -> Tuple[str, str]:
+ impl = self.get_sys_implementation()
+ return impl[0], ".".join(impl[1].split(".")[:2])
+
+ def get_mpy_cross_args(self) -> List[str]:
+ impl = self.get_sys_implementation()
+ sys_mpy = impl[2]
+ if sys_mpy is None:
+ return []
+
+ # https://docs.micropython.org/en/latest/reference/mpyfiles.html#versioning-and-compatibility-of-mpy-files
+ args = []
+ arch = [
+ None,
+ "x86",
+ "x64",
+ "armv6",
+ "armv6m",
+ "armv7m",
+ "armv7em",
+ "armv7emsp",
+ "armv7emdp",
+ "xtensa",
+ "xtensawin",
+ ][sys_mpy >> 10]
+ if arch:
+ args.append("-march=" + arch)
+ if not sys_mpy & 0x200:
+ args.append("-mno-unicode")
+
+ return args
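+    # Worked example (illustrative, following the linked mpy docs): sys_mpy == 10757
+    # decodes as arch index 10757 >> 10 == 10 -> "xtensawin" (so "-march=xtensawin"
+    # is added) with bit 0x200 set (so "-mno-unicode" is not); the low byte
+    # 10757 & 0xFF == 5 is the bytecode version, which this method does not need.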
+
+ @abstractmethod
+ def fetch_sys_path(self) -> List[str]:
+ ...
+
+ def get_sys_implementation(self) -> Tuple[str, str, int]:
+ if self._sys_implementation is None:
+ self._sys_implementation = self.fetch_sys_implementation()
+ return self._sys_implementation
+
+ @abstractmethod
+ def fetch_sys_implementation(self) -> Tuple[str, str, int]:
+ ...
+
+ @abstractmethod
+ def remove_file_if_exists(self, path: str) -> None:
+ ...
+
+ @abstractmethod
+ def remove_dir_if_empty(self, path: str) -> bool:
+ ...
+
+ @abstractmethod
+ def list_meta_dir_names(self, path: str, dist_name: Optional[str] = None) -> List[str]:
+ """Return meta dir names from the indicated directory"""
+ ...
+
+ def get_default_target(self) -> str:
+ sys_path = self.get_sys_path()
+ # M5-Flow 2.0.0 has both /lib and /flash/libs
+ for candidate in ["/flash/lib", "/flash/libs", "/lib"]:
+ if candidate in sys_path:
+ return candidate
+
+ for entry in sys_path:
+ if "lib" in entry:
+ return entry
+ raise AssertionError("Could not determine default target")
+
+    def list_dists(self, paths: Optional[List[str]] = None) -> Dict[str, Tuple[str, str]]:
+        if not paths:
+            # TODO: Consider looking at only a single directory
+ paths = [entry for entry in self.get_sys_path() if entry.startswith("/")]
+
+ result = {}
+ for path in paths:
+ for dir_name in self.list_meta_dir_names(path):
+ dist_name, _ = parse_meta_dir_name(dir_name)
+ if dist_name not in result:
+ result[dist_name] = dir_name, path
+
+ return result
+
+ def remove_dist(
+ self, dist_name: str, target: Optional[str] = None, above_target: bool = False
+ ) -> None:
+ could_remove = False
+ if target:
+ result = self.check_remove_dist_from_path(dist_name, target)
+ could_remove = could_remove or result
+ if above_target and target in self.get_sys_path():
+ for entry in self.get_sys_path():
+ if entry == "":
+ continue
+ elif entry == target:
+ break
+ else:
+ result = self.check_remove_dist_from_path(dist_name, entry)
+ could_remove = could_remove or result
+
+ else:
+ for entry in self.get_sys_path():
+ if entry.startswith("/"):
+ result = self.check_remove_dist_from_path(dist_name, entry)
+ could_remove = could_remove or result
+ if result:
+ break
+
+ if not could_remove:
+ logger.warning("Could not find %r for removing", dist_name)
+
+ def check_remove_dist_from_path(self, dist_name: str, path: str) -> bool:
+ meta_dirs = self.list_meta_dir_names(path, dist_name)
+ result = False
+ for meta_dir_name in meta_dirs:
+ self.remove_dist_by_meta_dir(path, meta_dir_name)
+ result = True
+
+ return result
+
+ def remove_dist_by_meta_dir(self, containing_dir: str, meta_dir_name: str) -> None:
+ record_bytes = self.read_file(self.join_path(containing_dir, meta_dir_name, "RECORD"))
+ record_lines = record_bytes.decode(META_ENCODING).splitlines()
+
+ package_dirs = set()
+ for line in record_lines:
+ rel_path, _, _ = line.split(",")
+ abs_path = self.join_path(containing_dir, rel_path)
+ logger.debug("Removing file %s", abs_path)
+ self.remove_file_if_exists(abs_path)
+ abs_dir, _ = self.split_dir_and_basename(abs_path)
+ while len(abs_dir) > len(containing_dir):
+ package_dirs.add(abs_dir)
+ abs_dir, _ = self.split_dir_and_basename(abs_dir)
+
+ for abs_dir in sorted(package_dirs, reverse=True):
+ did_remove = self.remove_dir_if_empty(abs_dir)
+ if did_remove and abs_dir in self._ensured_directories:
+ self._ensured_directories.remove(abs_dir)
+
+ def join_path(self, *parts: str) -> str:
+ assert parts
+ return self.get_dir_sep().join(parts)
+
+ def split_dir_and_basename(self, path: str) -> Tuple[str, str]:
+ dir_name, basename = path.rsplit(self.get_dir_sep(), maxsplit=1)
+ return dir_name or None, basename or None
+
+ def normpath(self, path: str) -> str:
+ return path.replace("\\", self.get_dir_sep()).replace("/", self.get_dir_sep())
+
+ @abstractmethod
+ def get_dir_sep(self) -> str:
+ ...
+
+ def write_file(self, path: str, content: bytes) -> None:
+ parent, _ = self.split_dir_and_basename(path)
+ self.ensure_dir_exists(parent)
+ self.write_file_in_existing_dir(path, content)
+
+ def ensure_dir_exists(self, path: str) -> None:
+ if (
+ path in self._ensured_directories
+ or path == "/"
+ or path.endswith(":")
+ or path.endswith(":\\")
+ ):
+ return
+ else:
+ parent, _ = self.split_dir_and_basename(path)
+ if parent:
+ self.ensure_dir_exists(parent)
+ self.mkdir_in_existing_parent_exists_ok(path)
+ self._ensured_directories.add(path)
+
+ @abstractmethod
+ def write_file_in_existing_dir(self, path: str, content: bytes) -> None:
+ ...
+
+ @abstractmethod
+ def mkdir_in_existing_parent_exists_ok(self, path: str) -> None:
+ ...
+
+
+class InterpreterAdapter(BaseAdapter, ABC):
+ """Base class for adapters, which communicate with an interpreter"""
+
+ def __init__(self, executable: str):
+ super().__init__()
+ self._executable = executable
+
+
+class ExecutableAdapter(InterpreterAdapter, ABC):
+ def get_dir_sep(self) -> str:
+ return os.path.sep
+
+
+class LocalExecutableAdapter(ExecutableAdapter):
+ ...
+
+
+class SshExecutableAdapter(ExecutableAdapter):
+ ...
+
+
+class LocalMirrorAdapter(BaseAdapter, ABC):
+ def __init__(self, base_path: str):
+ super().__init__()
+ self.base_path = base_path
+
+ def get_user_packages_path(self) -> Optional[str]:
+ return None
+
+ def get_dir_sep(self) -> str:
+ return "/"
+
+ def get_mpy_cross_args(self) -> List[str]:
+ return []
+
+ def read_file(self, path: str) -> bytes:
+ local_path = self.convert_to_local_path(path)
+ with open(local_path, "rb") as fp:
+ return fp.read()
+
+ def write_file_in_existing_dir(self, path: str, content: bytes) -> None:
+ local_path = self.convert_to_local_path(path)
+ assert not os.path.isdir(local_path)
+
+ block_size = 4 * 1024
+ with open(local_path, "wb") as fp:
+ while content:
+ block = content[:block_size]
+ content = content[block_size:]
+ bytes_written = fp.write(block)
+ fp.flush()
+ os.fsync(fp)
+ assert bytes_written == len(block)
+
+ def remove_file_if_exists(self, path: str) -> None:
+ local_path = self.convert_to_local_path(path)
+ if os.path.exists(local_path):
+ os.remove(local_path)
+
+ def remove_dir_if_empty(self, path: str) -> bool:
+ local_path = self.convert_to_local_path(path)
+ assert os.path.isdir(local_path)
+ content = os.listdir(local_path)
+ if content:
+ return False
+ else:
+ os.rmdir(local_path)
+ return True
+
+ def mkdir_in_existing_parent_exists_ok(self, path: str) -> None:
+ local_path = self.convert_to_local_path(path)
+ if not os.path.isdir(local_path):
+ assert not os.path.exists(local_path)
+ os.mkdir(local_path, 0o755)
+
+ def convert_to_local_path(self, device_path: str) -> str:
+ assert device_path.startswith("/")
+ return os.path.normpath(self.base_path + device_path)
+
+ def list_meta_dir_names(self, path: str, dist_name: Optional[str] = None) -> List[str]:
+ local_path = self.convert_to_local_path(path)
+ try:
+ return [
+ name
+ for name in os.listdir(local_path)
+ if name.endswith(".dist-info")
+ and (dist_name is None or name.startswith(dist_name + "-"))
+ ]
+ except FileNotFoundError:
+ # skipping non-existing dirs
+ return []
+
+
+class MountAdapter(LocalMirrorAdapter):
+ def __init__(self, base_path: str):
+ super().__init__(base_path)
+ if not os.path.exists(base_path):
+ raise UserError(f"Can't find mount point {base_path}")
+ if os.path.isfile(base_path):
+ raise UserError(f"Mount point {base_path} can't be a file")
+
+ self._circuitpython_version = self._infer_cp_version()
+
+ def fetch_sys_path(self) -> List[str]:
+ if os.path.isdir(os.path.join(self.base_path, "lib")) or self.is_circuitpython():
+ return ["", "/", ".frozen", "/lib"]
+ elif os.path.isdir(os.path.join(self.base_path, "flash")):
+ return ["", "/flash", "/flash/lib"]
+ else:
+ return ["", "/", ".frozen", "/lib"]
+
+ def fetch_sys_implementation(self) -> Tuple[str, str, int]:
+ if self._circuitpython_version:
+ return ("circuitpython", self._circuitpython_version, 0)
+ else:
+ raise UserError("Could not determine sys.implementation")
+
+ def is_circuitpython(self) -> bool:
+ # TODO: better look into the file as well
+ return os.path.isfile(os.path.join(self.base_path, "boot_out.txt"))
+
+ def _infer_cp_version(self) -> Optional[str]:
+ boot_out_path = os.path.join(self.base_path, "boot_out.txt")
+ if os.path.exists(boot_out_path):
+ with open(boot_out_path, encoding="utf-8") as fp:
+ firmware_info = fp.readline().strip()
+ match = re.match(r".*?CircuitPython (\d+\.\d+)\..+?", firmware_info)
+ if match:
+ return match.group(1)
+
+ return None
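+    # e.g. a boot_out.txt whose first line reads (illustrative values)
+    # "Adafruit CircuitPython 7.2.0 on 2022-02-24; ..." yields "7.2".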
+
+
+class DirAdapter(LocalMirrorAdapter):
+ def __init__(self, base_path: str):
+ super().__init__(base_path)
+ if not os.path.isdir(base_path):
+ assert not os.path.exists(base_path)
+ os.makedirs(base_path, mode=0o755)
+
+ def fetch_sys_path(self) -> List[str]:
+        # This means that the list command without --path will consider this directory
+ return ["/"]
+
+ def fetch_sys_implementation(self) -> Tuple[str, str, int]:
+ # TODO:
+ return ("micropython", "1.18", 0)
+
+ def get_default_target(self) -> str:
+ return "/"
+
+
+def create_adapter(port: Optional[str], mount: Optional[str], dir: Optional[str], **kw) -> Adapter:
+ if port:
+ from pipkin import bare_metal, serial_connection
+
+ connection = serial_connection.SerialConnection(port)
+ return bare_metal.SerialPortAdapter(connection)
+ elif dir:
+ return DirAdapter(dir)
+ elif mount:
+ return MountAdapter(mount)
+ else:
+ # TODO: infer the target
+ raise NotImplementedError()
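+
+
+# Usage sketch (illustrative):
+#
+#   adapter = create_adapter(port=None, mount="/media/user/CIRCUITPY", dir=None)
+#   adapter.list_dists()  # e.g. {"foo": ("foo-1.0.dist-info", "/lib")}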
diff --git a/thonny/vendored_libs/pipkin/bare_metal.py b/thonny/vendored_libs/pipkin/bare_metal.py
new file mode 100644
index 000000000..54712609f
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/bare_metal.py
@@ -0,0 +1,895 @@
+import ast
+import binascii
+import errno
+import os
+import re
+import struct
+import time
+from abc import ABC
+from logging import getLogger
+from textwrap import dedent
+from typing import Any, Callable, Dict, List, Optional, Tuple
+
+from pipkin import UserError
+from pipkin.adapters import BaseAdapter
+from pipkin.common import CommunicationError, ManagementError, ProtocolError
+from pipkin.connection import MicroPythonConnection, ReadingTimeoutError
+from pipkin.serial_connection import SerialConnection
+from pipkin.util import starts_with_continuation_byte
+
+logger = getLogger(__name__)
+
+RAW_MODE_CMD = b"\x01"
+NORMAL_MODE_CMD = b"\x02"
+INTERRUPT_CMD = b"\x03"
+SOFT_REBOOT_CMD = b"\x04"
+PASTE_MODE_CMD = b"\x05"
+PASTE_MODE_LINE_PREFIX = b"=== "
+
+PASTE_SUBMIT_MODE = "paste"
+RAW_PASTE_SUBMIT_MODE = "raw_paste"
+RAW_SUBMIT_MODE = "raw"
+
+RAW_PASTE_COMMAND = b"\x05A\x01"
+RAW_PASTE_CONFIRMATION = b"R\x01"
+RAW_PASTE_CONTINUE = b"\x01"
+
+MGMT_VALUE_START = b""
+MGMT_VALUE_END = b""
+
+# How many seconds to wait for something that should appear quickly.
+# In other words -- how long to wait with reporting a protocol error
+# (hoping that the required piece is still coming)
+WAIT_OR_CRASH_TIMEOUT = 5
+
+FIRST_RAW_PROMPT = b"raw REPL; CTRL-B to exit\r\n>"
+
+RAW_PROMPT = b">"
+
+
+WEBREPL_REQ_S = "<2sBBQLH64s"
+WEBREPL_PUT_FILE = 1
+WEBREPL_GET_FILE = 2
+
+
+EOT = b"\x04"
+NORMAL_PROMPT = b">>> "
+LF = b"\n"
+OK = b"OK"
+
+ENCODING = "utf-8"
+TRACEBACK_MARKER = b"Traceback (most recent call last):"
+
+OutputConsumer = Callable[[str, str], None]
+
+
+class BareMetalAdapter(BaseAdapter, ABC):
+ def __init__(
+ self,
+ connection: MicroPythonConnection,
+ submit_mode: Optional[str] = None,
+ write_block_size: Optional[int] = None,
+ write_block_delay: Optional[float] = None,
+ ):
+ super().__init__()
+ self._connection = connection
+ (
+ self._submit_mode,
+ self._write_block_size,
+ self._write_block_delay,
+ ) = self._infer_submit_parameters(submit_mode, write_block_size, write_block_delay)
+ self._last_prompt: Optional[bytes] = None
+
+ self._interrupt_to_prompt()
+ self._prepare_helper()
+
+ def get_dir_sep(self) -> str:
+ return "/"
+
+ def _infer_submit_parameters(
+ self,
+ submit_mode: Optional[str] = None,
+ write_block_size: Optional[int] = None,
+ write_block_delay: Optional[float] = None,
+ ) -> Tuple[str, int, float]:
+
+ if submit_mode is None:
+ submit_mode = RAW_PASTE_SUBMIT_MODE
+
+ if write_block_size is None:
+ write_block_size = 255
+
+ if write_block_delay is None:
+ if submit_mode == RAW_SUBMIT_MODE:
+ write_block_delay = 0.01
+ else:
+ write_block_delay = 0.0
+
+ return submit_mode, write_block_size, write_block_delay
+
+ def _interrupt_to_prompt(self) -> None:
+ # It's safer to thoroughly interrupt before poking with RAW_MODE_CMD
+ # as Pico may get stuck otherwise
+ # https://github.com/micropython/micropython/issues/7867
+ interventions = [(INTERRUPT_CMD, 0.1), (INTERRUPT_CMD, 0.1), (RAW_MODE_CMD, 0.1)]
+
+ for cmd, timeout in interventions:
+ self._write(cmd)
+ try:
+ self._log_output_until_active_prompt(timeout=timeout)
+ break
+ except ReadingTimeoutError as e:
+ logger.debug(
+ "Could not get prompt with intervention %r and timeout %r. Read bytes: %r",
+ cmd,
+ timeout,
+ e.read_bytes,
+ )
+ # Try again as long as there are interventions left
+ else:
+ raise CommunicationError("Could not get raw REPL")
+
+ def _log_output(self, data: bytes, stream: str = "stdout") -> None:
+ logger.debug("read %s: %r", stream, data)
+
+ def _prepare_helper(self) -> None:
+ script = (
+ dedent(
+ """
+ class __pipkin_helper:
+ import builtins
+ try:
+ import uos as os
+ except builtins.ImportError:
+ import os
+ import sys
+
+ @builtins.classmethod
+ def print_mgmt_value(cls, obj):
+ cls.builtins.print({mgmt_start!r}, cls.builtins.repr(obj), {mgmt_end!r}, sep='', end='')
+ """
+ ).format(
+ mgmt_start=MGMT_VALUE_START.decode(ENCODING),
+ mgmt_end=MGMT_VALUE_END.decode(ENCODING),
+ )
+ + "\n"
+ ).lstrip()
+ self._execute_without_output(script)
+
+ def fetch_sys_path(self) -> List[str]:
+ return self._evaluate("__pipkin_helper.sys.path")
+
+ def fetch_sys_implementation(self) -> Tuple[str, str, int]:
+ return self._evaluate("__pipkin_helper.builtins.tuple(__pipkin_helper.sys.implementation)")
+
+ def get_user_packages_path(self) -> Optional[str]:
+ return None
+
+ def read_file(self, path: str) -> bytes:
+
+ hex_mode = self._should_hexlify(path)
+
+ open_script = dedent(
+ f"""
+ try:
+ __pipkin_fp = __pipkin_helper.builtins.open({path!r}, 'rb')
+ except OSError as e:
+ print(e)
+ """
+ )
+ out, err = self._execute_and_capture_output(open_script)
+
+ if (out + err).strip():
+ if any(str(nr) in out + err for nr in [errno.ENOENT, errno.ENODEV]):
+ raise FileNotFoundError(f"Can't find {path} on target")
+ else:
+ raise ManagementError(
+ f"Could not open file {path} for reading", script=open_script, out=out, err=err
+ )
+
+ if hex_mode:
+ self._execute_without_output("from binascii import hexlify as __temp_hexlify")
+
+ block_size = 1024
+ num_bytes_read = 0
+ blocks = []
+ while True:
+ if hex_mode:
+ block = binascii.unhexlify(
+ self._evaluate("__temp_hexlify(__pipkin_fp.read(%s))" % block_size)
+ )
+ else:
+ block = self._evaluate("__pipkin_fp.read(%s)" % block_size)
+
+ if block:
+ blocks.append(block)
+ num_bytes_read += len(block)
+
+ if len(block) < block_size:
+ break
+
+ self._execute_without_output(
+ dedent(
+ """
+ __pipkin_fp.close()
+ del __pipkin_fp
+ try:
+ del __temp_hexlify
+ except:
+ pass
+ """
+ )
+ )
+
+ return b"".join(blocks)
+
+ def remove_file_if_exists(self, path: str) -> None:
+ self._execute_without_output(
+ dedent(
+ f"""
+ try:
+ __pipkin_helper.os.remove({path!r})
+ except OSError as e:
+ if e.errno not in [{errno.ENOENT}, {errno.ENODEV}]:
+ raise
+ """
+ )
+ )
+
+ def remove_dir_if_empty(self, path: str) -> bool:
+ return self._evaluate(
+ dedent(
+ f"""
+ if __pipkin_helper.os.listdir({path!r}):
+ __pipkin_helper.print_mgmt_value(False)
+ else:
+ __pipkin_helper.os.remove({path!r})
+ __pipkin_helper.print_mgmt_value(True)
+ """
+ )
+ )
+
+ def mkdir_in_existing_parent_exists_ok(self, path: str) -> None:
+ self._execute_without_output(
+ dedent(
+ f"""
+ try:
+ __pipkin_helper.os.mkdir({path!r})
+ except __pipkin_helper.builtins.OSError as e:
+ if e.errno != {errno.EEXIST}:
+ raise
+ """
+ )
+ )
+
+ def list_meta_dir_names(self, path: str, dist_name: Optional[str] = None) -> List[str]:
+ if dist_name:
+ dist_name_condition = f"and name.startswith({dist_name+'-'!r})"
+ else:
+ dist_name_condition = ""
+
+ return self._evaluate(
+ dedent(
+ f"""
+ try:
+ __pipkin_helper.print_mgmt_value([
+ name for name
+ in __pipkin_helper.os.listdir({path!r})
+ if name.endswith('.dist-info') {dist_name_condition}
+ ])
+ except OSError as e:
+ if e.errno in [{errno.ENODEV}, {errno.ENOENT}]:
+ __pipkin_helper.print_mgmt_value([])
+ else:
+ raise
+"""
+ )
+ )
+
+ def _submit_code(self, script: str) -> None:
+ assert script
+
+ to_be_sent = script.encode("UTF-8")
+ logger.debug("Submitting via %s: %r", self._submit_mode, to_be_sent[:1000])
+
+ # assuming we are already at a prompt, but threads may have produced something extra
+ discarded_bytes = self._connection.read_all()
+ if discarded_bytes:
+ logger.info("Discarding %r", discarded_bytes)
+
+ if self._submit_mode == PASTE_SUBMIT_MODE:
+ self._submit_code_via_paste_mode(to_be_sent)
+ elif self._submit_mode == RAW_PASTE_SUBMIT_MODE:
+ try:
+ self._submit_code_via_raw_paste_mode(to_be_sent)
+ except RawPasteNotSupportedError:
+ logger.warning("Could not use expected raw paste, falling back to paste mode")
+ self._submit_mode = PASTE_SUBMIT_MODE
+ self._submit_code_via_paste_mode(to_be_sent)
+ else:
+ self._submit_code_via_raw_mode(to_be_sent)
+
+ def _submit_code_via_paste_mode(self, script_bytes: bytes) -> None:
+ # Go to paste mode
+ self._ensure_normal_mode()
+ self._write(PASTE_MODE_CMD)
+ discarded = self._connection.read_until(PASTE_MODE_LINE_PREFIX)
+ logger.debug("Discarding %r", discarded)
+
+ # Send script
+ while script_bytes:
+ block = script_bytes[: self._write_block_size]
+ script_bytes = script_bytes[self._write_block_size :]
+
+ # find proper block boundary
+ while True:
+ expected_echo = block.replace(b"\r\n", b"\r\n" + PASTE_MODE_LINE_PREFIX)
+ if (
+ len(expected_echo) > self._write_block_size
+ or block.endswith(b"\r")
+ or len(block) > 2
+ and starts_with_continuation_byte(script_bytes)
+ ):
+ # move last byte to the next block
+ script_bytes = block[-1:] + script_bytes
+ block = block[:-1]
+ continue
+ else:
+ break
+
+ self._write(block)
+ self._connection.read_all_expected(expected_echo, timeout=WAIT_OR_CRASH_TIMEOUT)
+
+ # push and read confirmation
+ self._write(EOT)
+ expected_confirmation = b"\r\n"
+ actual_confirmation = self._connection.read(
+ len(expected_confirmation), timeout=WAIT_OR_CRASH_TIMEOUT
+ )
+ assert actual_confirmation == expected_confirmation, "Expected %r, got %r" % (
+ expected_confirmation,
+ actual_confirmation,
+ )
+
+ def _submit_code_via_raw_mode(self, script_bytes: bytes) -> None:
+ self._ensure_raw_mode()
+
+ to_be_written = script_bytes + EOT
+
+ while to_be_written:
+            block = to_be_written[: self._write_block_size]
+ self._write(block)
+ to_be_written = to_be_written[len(block) :]
+ if to_be_written:
+ time.sleep(self._write_block_delay)
+
+ # fetch command confirmation
+ confirmation = self._connection.soft_read(2, timeout=WAIT_OR_CRASH_TIMEOUT)
+
+ if confirmation != OK:
+ data = confirmation + self._connection.read_all()
+ data += self._connection.read(1, timeout=1, timeout_is_soft=True)
+ data += self._connection.read_all()
+ logger.error(
+ "Could not read command confirmation for script\n\n: %s\n\n" "Got: %r",
+ script_bytes,
+ data,
+ )
+ raise ProtocolError("Could not read command confirmation")
+
+ def _submit_code_via_raw_paste_mode(self, script_bytes: bytes) -> None:
+ self._ensure_raw_mode()
+ self._connection.set_text_mode(False)
+ self._write(RAW_PASTE_COMMAND)
+ response = self._connection.soft_read(2, timeout=WAIT_OR_CRASH_TIMEOUT)
+ if response != RAW_PASTE_CONFIRMATION:
+ # Occasionally, the device initially supports raw paste but later doesn't allow it
+ # https://github.com/thonny/thonny/issues/1545
+ time.sleep(0.01)
+ response += self._connection.read_all()
+ if response == FIRST_RAW_PROMPT:
+ self._last_prompt = FIRST_RAW_PROMPT
+ raise RawPasteNotSupportedError()
+ else:
+ logger.error("Got %r instead of raw-paste confirmation", response)
+ raise ProtocolError("Could not get raw-paste confirmation")
+
+ self._raw_paste_write(script_bytes)
+ self._connection.set_text_mode(True)
+
+ def _raw_paste_write(self, command_bytes):
+ # Adapted from https://github.com/micropython/micropython/commit/a59282b9bfb6928cd68b696258c0dd2280244eb3#diff-cf10d3c1fe676599a983c0ec85b78c56c9a6f21b2d896c69b3e13f34d454153e
+
+ # Read initial header, with window size.
+ data = self._connection.soft_read(2, timeout=2)
+ assert len(data) == 2, "Could not read initial header, got %r" % (
+ data + self._connection.read_all()
+ )
+ window_size = data[0] | data[1] << 8
+ window_remain = window_size
+
+ # Write out the command_bytes data.
+ i = 0
+ while i < len(command_bytes):
+ while window_remain == 0 or not self._connection.incoming_is_empty():
+ data = self._connection.soft_read(1, timeout=WAIT_OR_CRASH_TIMEOUT)
+ if data == b"\x01":
+ # Device indicated that a new window of data can be sent.
+ window_remain += window_size
+ elif data == b"\x04":
+ # Device indicated abrupt end, most likely a syntax error.
+ # Acknowledge it and finish.
+ self._write(b"\x04")
+ logger.debug(
+ "Abrupt end of raw paste submit after submitting %s bytes out of %s",
+ i,
+ len(command_bytes),
+ )
+ return
+ else:
+ # Unexpected data from device.
+ logger.error("Unexpected read during raw paste: %r", data)
+ raise ProtocolError("Unexpected read during raw paste")
+ # Send out as much data as possible that fits within the allowed window.
+ b = command_bytes[i : min(i + window_remain, len(command_bytes))]
+ self._write(b)
+ window_remain -= len(b)
+ i += len(b)
+
+ # Indicate end of data.
+ self._write(b"\x04")
+
+ # Wait for device to acknowledge end of data.
+ data = self._connection.soft_read_until(b"\x04", timeout=WAIT_OR_CRASH_TIMEOUT)
+ if not data.endswith(b"\x04"):
+ logger.error("Could not complete raw paste. Ack: %r", data)
+ raise ProtocolError("Could not complete raw paste")
+
+ def _ensure_raw_mode(self):
+ if self._last_prompt in [
+ RAW_PROMPT,
+ EOT + RAW_PROMPT,
+ FIRST_RAW_PROMPT,
+ ]:
+ return
+ logger.debug("requesting raw mode at %r", self._last_prompt)
+
+ # assuming we are currently on a normal prompt
+ self._write(RAW_MODE_CMD)
+ self._log_output_until_active_prompt()
+ if self._last_prompt == NORMAL_PROMPT:
+ # Don't know why this happens sometimes (e.g. when interrupting a Ctrl+D or restarted
+ # program, which is outputting text on ESP32)
+ logger.info("Found normal prompt instead of expected raw prompt. Trying again.")
+ self._write(RAW_MODE_CMD)
+ time.sleep(0.5)
+ self._log_output_until_active_prompt()
+
+ if self._last_prompt != FIRST_RAW_PROMPT:
+ logger.error(
+ "Could not enter raw prompt, got %r",
+ self._last_prompt,
+ )
+ raise ProtocolError("Could not enter raw prompt")
+
+ def _ensure_normal_mode(self, force=False):
+ if self._last_prompt == NORMAL_PROMPT and not force:
+ return
+
+ logger.debug("requesting normal mode at %r", self._last_prompt)
+ self._write(NORMAL_MODE_CMD)
+ self._log_output_until_active_prompt()
+ assert self._last_prompt == NORMAL_PROMPT, (
+ "Could not get normal prompt, got %s" % self._last_prompt
+ )
+
+ def _log_output_until_active_prompt(self, timeout: float = WAIT_OR_CRASH_TIMEOUT) -> None:
+ def collect_output(data, stream):
+ if data:
+ logger.info("Discarding %s: %r", stream, data)
+
+ self._process_output_until_active_prompt(collect_output, timeout=timeout)
+
+ def _capture_output_until_active_prompt(self, timeout: float) -> Tuple[str, str]:
+ output = {"stdout": "", "stderr": ""}
+
+ def collect_output(data, stream):
+ output[stream] += data
+
+ self._process_output_until_active_prompt(collect_output, timeout=timeout)
+
+ return output["stdout"], output["stderr"]
+
+ def _process_output_until_active_prompt(
+ self,
+ output_consumer: OutputConsumer,
+ timeout: float,
+ ):
+
+ PROMPT_MARKERS = [NORMAL_PROMPT, EOT + RAW_PROMPT, FIRST_RAW_PROMPT]
+ PROMPT_MARKERS_RE = re.compile(
+ b"|".join(
+ map(
+ re.escape,
+ PROMPT_MARKERS,
+ )
+ )
+ )
+
+ start_time = time.time()
+
+ while True:
+ spent_time = time.time() - start_time
+ time_left = max(timeout - spent_time, 0.0)
+ data = self._connection.read_until(PROMPT_MARKERS_RE, timeout=time_left)
+ assert any(data.endswith(marker) for marker in PROMPT_MARKERS)
+
+ for prompt in PROMPT_MARKERS:
+ if data.endswith(prompt):
+ self._last_prompt = prompt
+ content = data[: -len(self._last_prompt)]
+ if EOT in content:
+ out, err = content.split(EOT, maxsplit=1)
+ elif TRACEBACK_MARKER in content:
+ out, err = content.split(TRACEBACK_MARKER, maxsplit=1)
+ err = TRACEBACK_MARKER + err
+ else:
+ out = content
+ err = b""
+ output_consumer(out.decode(ENCODING), "stdout")
+ if err:
+ output_consumer(err.decode(ENCODING), "stderr")
+ break
+
+ # Check if it's really active prompt
+ follow_up = self._connection.soft_read(1, timeout=0.01)
+ if follow_up:
+ # Nope, the prompt is not active.
+ # (Actually it may be that a background thread has produced this follow up,
+ # but this would be too hard to consider.)
+ # Don't output yet, because the follow-up may turn into another prompt,
+ # and they can be captured all together.
+ self._connection.unread(follow_up)
+ output_consumer(self._last_prompt.decode(ENCODING), "stdout")
+ else:
+ break
+
+ def _evaluate(self, script: str) -> Any:
+ """Evaluate the output of the script or raise error, if anything looks wrong.
+
+ Adds printing code if the script contains single expression and doesn't
+ already contain printing code"""
+ try:
+ ast.parse(script, mode="eval")
+ prefix = "__pipkin_helper.print_mgmt_value("
+ suffix = ")"
+ if not script.strip().startswith(prefix):
+ script = prefix + script + suffix
+ except SyntaxError:
+ pass
+
+ out, err = self._execute_and_capture_output(script)
+ if err:
+ raise ManagementError("Script produced errors", script, out, err)
+ elif (
+ MGMT_VALUE_START.decode(ENCODING) not in out
+ or MGMT_VALUE_END.decode(ENCODING) not in out
+ ):
+ raise ManagementError("Management markers missing", script, out, err)
+
+ start_token_pos = out.index(MGMT_VALUE_START.decode(ENCODING))
+ end_token_pos = out.index(MGMT_VALUE_END.decode(ENCODING))
+
+ # a thread or IRQ handler may have written something before or after mgmt value
+ prefix = out[:start_token_pos]
+ value_str = out[start_token_pos + len(MGMT_VALUE_START) : end_token_pos]
+ suffix = out[end_token_pos + len(MGMT_VALUE_END) :]
+
+ try:
+ value = ast.literal_eval(value_str)
+ except Exception as e:
+ raise ManagementError("Could not parse management response", script, out, err) from e
+
+ if prefix:
+ logger.warning("Eval output had unexpected prefix: %r", prefix)
+ if suffix:
+ logger.warning("Eval output had unexpected suffix: %r", suffix)
+
+ return value
+
+ def _write(self, data: bytes) -> None:
+ if (
+ RAW_MODE_CMD in data
+ or NORMAL_MODE_CMD in data
+ or INTERRUPT_CMD in data
+ or EOT in data
+ or PASTE_MODE_CMD in data
+ ):
+ logger.debug("Sending ctrl chars: %r", data)
+ num_bytes = self._connection.write(data)
+ assert num_bytes == len(data)
+
+ def _should_hexlify(self, path):
+ for ext in (".py", ".txt", ".csv"):
+ if path.lower().endswith(ext):
+ return False
+
+ return True
+
+ def _execute_without_output(self, script: str, timeout: float = WAIT_OR_CRASH_TIMEOUT) -> None:
+ """Meant for management tasks."""
+ out, err = self._execute_and_capture_output(script, timeout=timeout)
+ if out or err:
+ raise ManagementError("Command output was not empty", script, out, err)
+
+ def _execute_and_capture_output(
+ self, script: str, timeout: float = WAIT_OR_CRASH_TIMEOUT
+ ) -> Tuple[str, str]:
+ output_lists: Dict[str, List[str]] = {"stdout": [], "stderr": []}
+
+ def consume_output(data, stream_name):
+ assert isinstance(data, str)
+ output_lists[stream_name].append(data)
+
+ self._execute_with_consumer(script, consume_output, timeout=timeout)
+ result = ["".join(output_lists[name]) for name in ["stdout", "stderr"]]
+ return result[0], result[1]
+
+ def _execute_with_consumer(
+ self, script, output_consumer: OutputConsumer, timeout: float
+ ) -> None:
+ self._submit_code(script)
+ self._process_output_until_active_prompt(output_consumer, timeout=timeout)
+
+
+class SerialPortAdapter(BareMetalAdapter):
+ def __init__(
+ self,
+ connection: SerialConnection,
+ submit_mode: Optional[str] = None,
+ write_block_size: Optional[int] = None,
+ write_block_delay: Optional[float] = None,
+ mount_path: Optional[str] = None,
+ ):
+ super().__init__(
+ connection,
+ submit_mode=submit_mode,
+ write_block_size=write_block_size,
+ write_block_delay=write_block_delay,
+ )
+ self._mount_path = mount_path
+
+ def _internal_path_to_mounted_path(self, target_path: str) -> str:
+ assert self._mount_path
+ assert target_path.startswith("/")
+ return os.path.normpath(os.path.join(self._mount_path, target_path[1:]))
+
+ def write_file_in_existing_dir(self, path: str, content: bytes) -> None:
+ start_time = time.time()
+
+ try:
+ self._write_file_via_serial(path, content)
+ except ReadOnlyFilesystemError as e:
+ if self._mount_path is not None:
+ self._write_file_via_mount(path, content)
+ else:
+ raise UserError(
+ "Target filesystem seems to be read-only. "
+ "If your device mounts its filesystem as a disk, "
+ "you may be able to manage it by using the --mount argument."
+ ) from e
+
+ logger.info("Wrote %s in %.1f seconds", path, time.time() - start_time)
+
+ def _write_file_via_mount(
+ self,
+ target_path: str,
+ content: bytes,
+ ) -> None:
+ mounted_target_path = self._internal_path_to_mounted_path(target_path)
+ with open(mounted_target_path, "wb") as f:
+ bytes_written = 0
+ block_size = 4 * 1024
+ to_be_written = content
+ while to_be_written:
+ block = to_be_written[:block_size]
+ bytes_written += f.write(block)
+ assert bytes_written
+ f.flush()
+ os.fsync(f)
+ to_be_written = to_be_written[block_size:]
+
+ assert bytes_written == len(content)
+
+ def _write_file_via_serial(self, target_path: str, content: bytes) -> None:
+ out, err = self._execute_and_capture_output(
+ dedent(
+ """
+ try:
+ __pipkin_path = '{path}'
+ __pipkin_written = 0
+ __pipkin_fp = __pipkin_helper.builtins.open(__pipkin_path, 'wb')
+ except __pipkin_helper.builtins.Exception as e:
+ __pipkin_helper.builtins.print(__pipkin_helper.builtins.str(e))
+ """
+ ).format(path=target_path),
+ )
+
+ if self._contains_read_only_error(out + err):
+ raise ReadOnlyFilesystemError()
+ elif out + err:
+ raise OSError(
+ "Could not open file %s for writing, output:\n%s" % (target_path, out + err)
+ )
+
+ # Define function to allow shorter write commands
+ hex_mode = self._should_hexlify(target_path)
+ if hex_mode:
+ self._execute_without_output(
+ dedent(
+ """
+ from binascii import unhexlify as __pipkin_unhex
+ def __W(x):
+ global __pipkin_written
+ __pipkin_written += __pipkin_fp.write(__pipkin_unhex(x))
+ __pipkin_fp.flush()
+ if __pipkin_helper.builtins.hasattr(__pipkin_helper.os, "sync"):
+ __pipkin_helper.os.sync()
+ """
+ )
+ )
+ else:
+ self._execute_without_output(
+ dedent(
+ """
+ def __W(x):
+ global __pipkin_written
+ __pipkin_written += __pipkin_fp.write(x)
+ __pipkin_fp.flush()
+ if __pipkin_helper.builtins.hasattr(__pipkin_helper.os, "sync"):
+ __pipkin_helper.os.sync()
+ """
+ )
+ )
+
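+        # For example, in hex mode a block b"\x00\xff" is hexlified to b"00ff",
+        # so the device receives the command __W(b'00ff').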
+ bytes_sent = 0
+ block_size = 1024
+
+ to_be_written = content
+ while to_be_written:
+ block = to_be_written[:block_size]
+ if hex_mode:
+ script = "__W(%r)" % binascii.hexlify(block)
+ else:
+ script = "__W(%r)" % block
+ out, err = self._execute_and_capture_output(script)
+ if out or err:
+ logger.error("Writing file produced unexpected output (%r) or error (%r)", out, err)
+ raise UserError(
+ "Could not write next block after having written %d bytes to %s"
+ % (bytes_sent, target_path)
+ )
+
+ bytes_sent += len(block)
+ to_be_written = to_be_written[block_size:]
+
+ bytes_received = self._evaluate("__pipkin_written")
+
+ if bytes_received != bytes_sent:
+            raise OSError("Sent %d bytes but device reported %d written" % (bytes_sent, bytes_received))
+
+ # clean up
+ self._execute_without_output(
+ dedent(
+ """
+ try:
+ del __W
+ del __pipkin_written
+ del __pipkin_path
+ __pipkin_fp.close()
+ del __pipkin_fp
+                    # NB! __pipkin_unhex exists only in hex mode; keep it last
+                    del __pipkin_unhex
+ except:
+ pass
+ """
+ )
+ )
+
+ def remove_file_if_exists(self, path: str) -> None:
+ try:
+ super().remove_file_if_exists(path)
+ except ManagementError as e:
+ if self._contains_read_only_error(e.out + e.err) and self._mount_path:
+ self._remove_file_via_mount(path)
+ else:
+ raise
+
+ def _remove_file_via_mount(self, target_path: str) -> None:
+ mounted_target_path = self._internal_path_to_mounted_path(target_path)
+ assert os.path.isfile(mounted_target_path)
+ os.remove(mounted_target_path)
+
+ def _contains_read_only_error(self, s: str) -> bool:
+ canonic_out = s.replace("-", "").lower()
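+        # For example, "OSError: [Errno 30] Read-only filesystem" canonicalizes
+        # to "oserror: [errno 30] readonly filesystem", matching "readonly".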
+ return (
+ "readonly" in canonic_out
+ or f"errno {errno.EROFS}" in canonic_out
+ or f"oserror: {errno.EROFS}" in canonic_out
+ )
+
+ def mkdir_in_existing_parent_exists_ok(self, path: str) -> None:
+ try:
+ super().mkdir_in_existing_parent_exists_ok(path)
+ except ManagementError as e:
+            if self._contains_read_only_error(e.out + e.err) and self._mount_path:
+ self._mkdir_via_mount(path)
+ else:
+ raise
+
+ def _mkdir_via_mount(self, path: str) -> bool:
+ mounted_path = self._internal_path_to_mounted_path(path)
+ if not os.path.isdir(mounted_path):
+ assert not os.path.exists(mounted_path)
+ os.mkdir(mounted_path, 0o755)
+ return True
+ else:
+ return False
+
+ def remove_dir_if_empty(self, path: str) -> bool:
+ try:
+ return super().remove_dir_if_empty(path)
+ except ManagementError as e:
+            if self._contains_read_only_error(e.out + e.err) and self._mount_path:
+                return self._remove_dir_if_empty_via_mount(path)
+ else:
+ raise
+
+ def _remove_dir_if_empty_via_mount(self, path: str) -> bool:
+ mounted_path = self._internal_path_to_mounted_path(path)
+ if os.listdir(mounted_path):
+ return False
+ else:
+ os.rmdir(mounted_path)
+ return True
+
+
+class WebReplAdapter(BareMetalAdapter):
+ def write_file_in_existing_dir(self, path: str, content: bytes) -> None:
+ """
+ Adapted from https://github.com/micropython/webrepl/blob/master/webrepl_cli.py
+ """
+ dest_fname = path.encode("utf-8")
+ rec = struct.pack(
+ WEBREPL_REQ_S, b"WA", WEBREPL_PUT_FILE, 0, 0, len(content), len(dest_fname), dest_fname
+ )
+ self._connection.set_text_mode(False)
+ try:
+ self._write(rec[:10])
+ self._write(rec[10:])
+ assert self._read_websocket_response() == 0
+
+ to_be_written = content
+ block_size = 1024
+ while to_be_written:
+ block = to_be_written[:block_size]
+ self._write(block)
+ to_be_written = to_be_written[block_size:]
+
+ assert self._read_websocket_response() == 0
+ finally:
+ self._connection.set_text_mode(True)
+
+ def _read_websocket_response(self):
+ data = self._connection.read(4)
+ sig, code = struct.unpack("<2sH", data)
+ assert sig == b"WB"
+ return code
+
+
+class RawPasteNotSupportedError(RuntimeError):
+ pass
+
+
+class ReadOnlyFilesystemError(OSError):
+ pass
diff --git a/thonny/vendored_libs/pipkin/common.py b/thonny/vendored_libs/pipkin/common.py
new file mode 100644
index 000000000..f74e01b5c
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/common.py
@@ -0,0 +1,18 @@
+class UserError(RuntimeError):
+ pass
+
+
+class CommunicationError(RuntimeError):
+ pass
+
+
+class ProtocolError(RuntimeError):
+ pass
+
+
+class ManagementError(ProtocolError):
+ def __init__(self, msg: str, script: str, out: str, err: str):
+        super().__init__(msg)
+ self.script = script
+ self.out = out
+ self.err = err
diff --git a/thonny/vendored_libs/pipkin/connection.py b/thonny/vendored_libs/pipkin/connection.py
new file mode 100644
index 000000000..fc1782fa4
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/connection.py
@@ -0,0 +1,211 @@
+import queue
+import re
+import time
+from logging import getLogger
+from queue import Queue
+from typing import Optional, Union
+
+logger = getLogger(__name__)
+
+
+class MicroPythonConnection:
+ """Utility class for using Serial or WebSocket connection
+
+ Uses background thread to read from the source as soon as possible
+ to avoid loss of data (because buffer overflow or the device discarding
+ unread data).
+
+ Allows unreading data.
+ """
+
+ def __init__(self):
+ self.encoding = "utf-8"
+ self._read_queue = Queue() # populated by reader thread
+ self._read_buffer = bytearray() # used for unreading and postponing bytes
+ self.num_bytes_received = 0
+ self.startup_time = time.time()
+ self.text_mode = True
+ self._error = None
+ self._reader_stopped = False
+
+ def soft_read(self, size: int, timeout: float = 1) -> bytes:
+ return self.read(size, timeout, True)
+
+ def read(self, size: int, timeout: float = 10, timeout_is_soft: bool = False) -> bytes:
+ if timeout == 0:
+ if timeout_is_soft:
+ return b""
+ else:
+ raise ReadingTimeoutError(read_bytes=b"")
+
+ timer = TimeHelper(timeout)
+
+ while len(self._read_buffer) < size:
+ self._check_for_error()
+
+ try:
+ self._read_buffer.extend(self._read_queue.get(True, timer.time_left))
+ except queue.Empty as e:
+ if timeout_is_soft:
+ return b""
+ else:
+ logger.error(
+ "Could not read expected %s bytes in %s seconds. Bytes read: %r",
+ size,
+ timeout,
+ self._read_buffer,
+ )
+ raise ReadingTimeoutError(read_bytes=self._read_buffer) from e
+
+ try:
+ data = self._read_buffer[:size]
+ return data
+ finally:
+ del self._read_buffer[:size]
+
+ def soft_read_until(self, terminator, timeout: float = 1000000) -> bytes:
+ return self.read_until(terminator, timeout, timeout_is_soft=True)
+
+ def read_until(
+ self,
+ terminator: Union[bytes, re.Pattern],
+ timeout: float = 1000000,
+ timeout_is_soft: bool = False,
+ ) -> bytes:
+ timer = TimeHelper(timeout)
+
+ if isinstance(terminator, bytes):
+ terminator = re.compile(re.escape(terminator))
+
+ assert isinstance(terminator, re.Pattern)
+
+ while True:
+ self._check_for_error()
+
+ match = re.search(terminator, self._read_buffer)
+ if match:
+ break
+
+ try:
+ data = self._read_queue.get(True, timer.time_left)
+ # print("RR", repr(data), file=sys.stderr)
+ assert len(data) > 0
+ self._read_buffer.extend(data)
+ except queue.Empty:
+ if timeout_is_soft:
+ break
+ else:
+ raise ReadingTimeoutError(read_bytes=self._read_buffer)
+
+ if match:
+ size = match.end()
+ else:
+ assert timeout_is_soft
+ size = len(self._read_buffer)
+
+ data = self._read_buffer[:size]
+ del self._read_buffer[:size]
+ return data
+
+ def _fetch_to_buffer(self) -> None:
+ while not self._read_queue.empty():
+ self._read_buffer.extend(self._read_queue.get(True))
+
+ def read_all(self, check_error: bool = True) -> bytes:
+ self._fetch_to_buffer()
+
+ if len(self._read_buffer) == 0 and check_error:
+ self._check_for_error()
+
+ try:
+ return self._read_buffer
+ finally:
+ self._read_buffer = bytearray()
+
+    def read_all_expected(self, expected: bytes, timeout: Optional[float] = None) -> bytes:
+ actual = self.read(len(expected), timeout=timeout)
+ actual += self.read_all()
+ assert expected == actual, "Expected %r, got %r" % (expected, actual)
+ return actual
+
+ def _check_for_error(self) -> None:
+ if self._error is None:
+ return
+
+ raise ConnectionError(self._error)
+
+ def unread(self, data: bytes) -> None:
+ if not data:
+ return
+
+ if isinstance(data, str):
+            data = bytearray(data.encode(self.encoding))  # keep the buffer mutable
+ elif isinstance(data, bytes):
+ data = bytearray(data)
+
+ self._read_buffer = data + self._read_buffer
+
+ def write(self, data: bytes) -> int:
+ """Writing"""
+ raise NotImplementedError()
+
+ def _log_data(self, data: bytes) -> None:
+ print(
+ data.decode(self.encoding, errors="replace")
+ .replace("\r\n", "\n")
+ .replace("\x01", "â‘ ")
+ .replace("\x02", "â‘¡")
+ .replace("\x03", "â‘¢")
+ .replace("\x04", "â‘£"),
+ end="",
+ )
+
+ def _make_output_available(self, data: bytes, block: bool = True) -> None:
+ # self._log_data(data)
+ if data:
+ self._read_queue.put(data, block=block)
+ self.num_bytes_received += len(data)
+
+ def incoming_is_empty(self) -> bool:
+ return self._read_queue.empty() and len(self._read_buffer) == 0
+
+ def outgoing_is_empty(self) -> bool:
+ return True
+
+ def buffers_are_empty(self) -> bool:
+ return self.incoming_is_empty() and self.outgoing_is_empty()
+
+ def set_text_mode(self, value: bool) -> None:
+ self.text_mode = value
+
+ def stop_reader(self) -> None:
+ self._reader_stopped = True
+ self._read_queue = Queue()
+ self._read_buffer = bytearray()
+
+ def close(self) -> None:
+ raise NotImplementedError()
+
+
+class ConnectionFailedException(ConnectionError):
+ pass
+
+
+class ReadingTimeoutError(TimeoutError):
+ def __init__(self, read_bytes: bytes):
+ super().__init__(f"Read bytes: {read_bytes!r}")
+ self.read_bytes = read_bytes
+
+
+class TimeHelper:
+ def __init__(self, time_allowed):
+ self.start_time = time.time()
+ self.time_allowed = time_allowed
+
+ @property
+ def time_spent(self):
+ return time.time() - self.start_time
+
+ @property
+ def time_left(self):
+ return max(self.time_allowed - self.time_spent, 0)
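+
+
+# For example, given some connection conn, a 5-second budget can be spread
+# over several reads:
+#
+#   timer = TimeHelper(5)
+#   first = conn.read(10, timeout=timer.time_left)
+#   rest = conn.read_until(b">>> ", timeout=timer.time_left)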
diff --git a/thonny/vendored_libs/pipkin/parser.py b/thonny/vendored_libs/pipkin/parser.py
new file mode 100644
index 000000000..3f0c3044a
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/parser.py
@@ -0,0 +1,325 @@
+import argparse
+import sys
+from typing import Any, List, Optional
+
+from pipkin import __version__
+
+
+def parse_arguments(raw_args: Optional[List[str]] = None) -> Any:
+ if raw_args is None:
+ raw_args = sys.argv[1:]
+
+ main_parser = argparse.ArgumentParser(
+ description="Tool for managing MicroPython and CircuitPython packages",
+ allow_abbrev=False,
+ add_help=False,
+ )
+
+ general_group = main_parser.add_argument_group(title="general")
+
+ general_group.add_argument(
+ "-h",
+ "--help",
+ help="Show this help message and exit",
+ action="help",
+ )
+ general_group.add_argument(
+ "-V",
+ "--version",
+ help="Show program version and exit",
+ action="version",
+ version=__version__,
+ )
+ verbosity_group = general_group.add_mutually_exclusive_group()
+ verbosity_group.add_argument(
+ "-v",
+ "--verbose",
+ help="Show more details about the process",
+ action="store_true",
+ )
+ verbosity_group.add_argument(
+ "-q",
+ "--quiet",
+ help="Don't show non-error output",
+ action="store_true",
+ )
+
+ connection_group = main_parser.add_argument_group(
+ title="target selection (pick one or let pipkin autodetect the port or mount)"
+ )
+ connection_exclusive_group = connection_group.add_mutually_exclusive_group()
+
+ connection_exclusive_group.add_argument(
+ "-p",
+ "--port",
+ help="Serial port of the target device",
+ metavar="",
+ )
+ connection_exclusive_group.add_argument(
+ "-m",
+ "--mount",
+ help="Mount point (volume, disk, drive) of the target device",
+ metavar="",
+ )
+ connection_exclusive_group.add_argument(
+ "-d",
+ "--dir",
+ help="Directory in the local filesystem",
+ metavar="",
+ )
+ # connection_exclusive_group.add_argument(
+ # "-e",
+ # "--exe",
+ # help="Interpreter executable (Unix or Windows port)",
+ # metavar="",
+ # )
+
+ # sub-parsers
+ subparsers = main_parser.add_subparsers(
+ title="commands",
+ description='Use "pipkin -h" for usage help of a command ',
+ dest="command",
+ required=True,
+ )
+
+ install_parser = subparsers.add_parser(
+ "install",
+ help="Install packages.",
+ description="Installs upip or pip compatible distribution packages onto "
+ "a MicroPython/CircuitPython device or into a local directory.",
+ )
+
+ uninstall_parser = subparsers.add_parser("uninstall", help="Uninstall packages.")
+ list_parser = subparsers.add_parser("list", help="List installed packages.")
+ show_parser = subparsers.add_parser(
+ "show", help="Show information about one or more installed packages."
+ )
+ freeze_parser = subparsers.add_parser(
+ "freeze", help="Output installed packages in requirements format."
+ )
+ _check_parser = subparsers.add_parser(
+ "check", help="Verify installed packages have compatible dependencies."
+ )
+ download_parser = subparsers.add_parser("download", help="Download packages.")
+ wheel_parser = subparsers.add_parser(
+ "wheel", help="Build Wheel archives for your requirements and dependencies."
+ )
+ cache_parser = subparsers.add_parser("cache", help="Inspect and manage pipkin cache.")
+
+ # common options
+ for parser in [install_parser, download_parser, wheel_parser]:
+ parser.add_argument(
+ "specs",
+ help="Package specification, eg. 'micropython-os' or 'micropython-os>=0.6'",
+ nargs="*",
+ metavar="",
+ )
+
+ specs_group = parser.add_argument_group(title="package selection")
+
+ specs_group.add_argument(
+ "-r",
+ "--requirement",
+ help="Install from the given requirements file.",
+ nargs="*",
+ dest="requirement_files",
+ metavar="",
+ default=[],
+ )
+ specs_group.add_argument(
+ "-c",
+ "--constraint",
+ help="Constrain versions using the given constraints file.",
+ nargs="*",
+ dest="constraint_files",
+ metavar="",
+ default=[],
+ )
+ specs_group.add_argument(
+ "--no-deps",
+ help="Don't install package dependencies.",
+ action="store_true",
+ )
+ specs_group.add_argument(
+ "--pre",
+ help="Include pre-release and development versions. By default, pipkin only finds stable versions.",
+ action="store_true",
+ )
+
+ # index-related
+ for parser in [install_parser, download_parser, wheel_parser, list_parser]:
+ index_group = parser.add_argument_group(title="index selection")
+ index_group.add_argument(
+ "-i",
+ "--index-url",
+ help="Base URL of the Python Package Index (default https://pypi.org/simple).",
+ metavar="",
+ )
+ index_group.add_argument(
+ "--extra-index-url",
+ help="Extra URLs of package indexes to use in addition to --index-url.",
+ nargs="*",
+ dest="extra_index_urls",
+ default=[],
+ metavar="",
+ )
+ index_group.add_argument(
+ "--no-index",
+ help="Ignore package index (only looking at --find-links URLs instead).",
+ action="store_true",
+ )
+ index_group.add_argument(
+ "--no-mp-org",
+ help="Don't let micropython.org/pi override other indexes.",
+ action="store_true",
+ )
+ index_group.add_argument(
+ "-f",
+ "--find-links",
+ help="If a URL or path to an html file, then parse for links to archives such as sdist "
+ "(.tar.gz) or wheel (.whl) files. If a local path or "
+ "file:// URL that's a directory, then look for archives in the directory listing.",
+ metavar="",
+ )
+
+ for parser in [uninstall_parser, show_parser]:
+ parser.add_argument(
+ "packages",
+ help="Package name",
+ nargs="*",
+ metavar="",
+ )
+
+ for parser in [list_parser, freeze_parser]:
+ # parser.add_argument(
+ # "--user",
+ # help="Only output packages installed in user-site. Relevant with Unix and Windows ports",
+ # action="store_true",
+ # )
+ # parser.add_argument(
+ # "--path",
+ # help="Restrict to the specified installation path for listing packages.",
+ # nargs="*",
+ # dest="paths",
+ # metavar="",
+ # default=[],
+ # )
+ parser.add_argument(
+ "--exclude",
+ help="Exclude specified package from the output.",
+ nargs="*",
+ dest="excludes",
+ metavar="",
+ default=[],
+ )
+
+ # install_parser.add_argument(
+ # "-t",
+ # "--target",
+ # help="Target directory in the target filesystem (eg. /lib)",
+ # metavar="",
+ # )
+ # install_parser.add_argument(
+ # "--user",
+ # help="Install to the Python user install directory for target platform. "
+ # "Only relevant with Unix and Windows ports",
+ # action="store_true",
+ # )
+ install_parser.add_argument(
+ "-U",
+ "--upgrade",
+ help="Upgrade all specified packages to the newest available version. "
+ "The handling of dependencies depends on the upgrade-strategy used.",
+ action="store_true",
+ )
+ install_parser.add_argument(
+ "--upgrade-strategy",
+ help="Determines how dependency upgrading should be handled [default: only-if-needed].\n"
+ "'eager' - dependencies are upgraded regardless of whether the currently installed "
+ "version satisfies the requirements of the upgraded package(s).\n"
+ "'only-if-needed' - are upgraded only when they do not satisfy the requirements of the "
+ "upgraded package(s).",
+ choices=["only-if-needed", "eager"],
+ default="only-if-needed",
+ metavar="",
+ )
+ install_parser.add_argument(
+ "--force-reinstall",
+ help="Reinstall all packages even if they are already up-to-date.",
+ action="store_true",
+ )
+ install_parser.add_argument(
+ "--compile",
+ help="Compile and install mpy files.",
+ action="store_true",
+ )
+
+ uninstall_parser.add_argument(
+ "-r",
+ "--requirement",
+ help="Uninstall all the packages listed in the given requirements file.",
+ nargs="*",
+ dest="requirement_files",
+ metavar="",
+ default=[],
+ )
+
+ uninstall_parser.add_argument(
+ "-y",
+ "--yes",
+ help="Don't ask for confirmation of uninstall deletions.",
+ action="store_true",
+ )
+
+ list_parser.add_argument(
+ "-o",
+ "--outdated",
+ help="List outdated packages",
+ action="store_true",
+ )
+ list_parser.add_argument(
+ "-u",
+ "--uptodate",
+ help="List uptodate packages",
+ action="store_true",
+ )
+ list_parser.add_argument(
+ "--pre",
+ help="Also consider pre-release and development versions when deciding whether package is outdated or uptodate.",
+ action="store_true",
+ )
+ list_parser.add_argument(
+ "--not-required",
+ help="List packages that are not dependencies of installed packages.",
+ action="store_true",
+ )
+ list_parser.add_argument(
+ "--format",
+ help="Select the output format among: columns (default), freeze, or json",
+ choices=["columns", "freeze", "json"],
+ default="columns",
+ metavar="",
+ )
+
+ download_parser.add_argument(
+ "-d",
+ "--dest",
+ help="Download packages into . Default: current directory.",
+ default=".",
+ metavar="",
+ )
+
+ wheel_parser.add_argument(
+ "-w",
+ "--wheel-dir",
+ help="Build wheels into , where the default is the current working directory.",
+ default=".",
+ metavar="",
+ )
+
+ cache_parser.add_argument("cache_command", choices=["dir", "info", "list", "purge"])
+
+ args = main_parser.parse_args(args=raw_args)
+
+ # print("ARGS", args)
+ return args
diff --git a/thonny/vendored_libs/pipkin/proxy.py b/thonny/vendored_libs/pipkin/proxy.py
new file mode 100644
index 000000000..7777563de
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/proxy.py
@@ -0,0 +1,438 @@
+#!/usr/bin/env python3
+"""
+MIT License
+
+Copyright (c) 2022 Aivar Annamaa
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+"""
+import copy
+import email.parser
+import errno
+import io
+import json
+import logging
+import shlex
+import tarfile
+import threading
+from html.parser import HTMLParser
+from http.server import BaseHTTPRequestHandler, HTTPServer
+from socketserver import BaseServer
+from textwrap import dedent
+from typing import Dict, List, Optional, Tuple
+from urllib.error import HTTPError
+from urllib.request import urlopen
+
+from pipkin.common import UserError
+
+MP_ORG_INDEX = "https://micropython.org/pi"
+PYPI_SIMPLE_INDEX = "https://pypi.org/simple"
+SERVER_ENCODING = "utf-8"
+
+# For efficient caching it's better if the proxy always runs at the same port
+PREFERRED_PORT = 36628
+
+logger = logging.getLogger(__name__)
+
+
+def shlex_join(split_command):
+ """Return a shell-escaped string from *split_command*."""
+ return " ".join(shlex.quote(arg) for arg in split_command)
+
+
+class SimpleUrlsParser(HTMLParser):
+ def error(self, message):
+ pass
+
+ def __init__(self):
+ self._current_tag: str = ""
+ self._current_attrs: List[Tuple[str, str]] = []
+ self.file_urls: Dict[str, str] = {}
+ super().__init__()
+
+ def handle_starttag(self, tag: str, attrs: List[Tuple[str, str]]) -> None:
+ self._current_tag = tag
+ self._current_attrs = attrs
+
+ def handle_data(self, data: str) -> None:
+ if self._current_tag == "a":
+ for att, val in self._current_attrs:
+ if att == "href":
+ self.file_urls[data] = val
+
+ def handle_endtag(self, tag):
+ pass
+
+
+class BaseIndexDownloader:
+ def __init__(self, index_url: str):
+ self._index_url = index_url.rstrip("/")
+ self._file_urls_cache: Dict[str, Dict[str, str]] = {}
+
+    def get_file_urls(self, dist_name: str) -> Optional[Dict[str, str]]:
+ if dist_name not in self._file_urls_cache:
+ self._file_urls_cache[dist_name] = self._download_file_urls(dist_name)
+
+ return self._file_urls_cache[dist_name]
+
+ def _download_file_urls(self, dist_name) -> Optional[Dict[str, str]]:
+ raise NotImplementedError()
+
+
+class SimpleIndexDownloader(BaseIndexDownloader):
+ def _download_file_urls(self, dist_name) -> Optional[Dict[str, str]]:
+ url = f"{self._index_url}/{dist_name}"
+ logger.info("Downloading file urls from simple index %s", url)
+
+ try:
+ with urlopen(url) as fp:
+ parser = SimpleUrlsParser()
+ parser.feed(fp.read().decode("utf-8"))
+ return parser.file_urls
+ except HTTPError as e:
+ if e.code == 404:
+ return None
+ else:
+ raise e
+
+
+class JsonIndexDownloader(BaseIndexDownloader):
+ def _download_file_urls(self, dist_name) -> Optional[Dict[str, str]]:
+ metadata_url = f"{self._index_url}/{dist_name}/json"
+ logger.info("Downloading file urls from json index at %s", metadata_url)
+
+ result = {}
+ try:
+ with urlopen(metadata_url) as fp:
+ data = json.load(fp)
+ releases = data["releases"]
+ for ver in releases:
+ for file in releases[ver]:
+ file_url = file["url"]
+ if "filename" in file:
+ file_name = file["filename"]
+ else:
+ # micropython.org/pi doesn't have it
+ file_name = file_url.split("/")[-1]
+ # may be missing micropython prefix
+ if not file_name.startswith(dist_name):
+ # Let's hope version part doesn't contain dashes
+ _, suffix = file_name.split("-")
+ file_name = dist_name + "-" + suffix
+ result[file_name] = file_url
+ except HTTPError as e:
+ if e.code == 404:
+ return None
+ else:
+ raise e
+ return result
+
+
+class PipkinProxy(HTTPServer):
+ def __init__(
+ self, no_mp_org: bool, index_url: Optional[str], extra_index_urls: List[str], port: int
+ ):
+ self._downloaders: List[BaseIndexDownloader] = []
+        self._downloaders_by_dist_name: Dict[str, Optional[BaseIndexDownloader]] = {}
+ if not no_mp_org:
+ self._downloaders.append(JsonIndexDownloader(MP_ORG_INDEX))
+ self._downloaders.append(SimpleIndexDownloader(index_url or PYPI_SIMPLE_INDEX))
+ for url in extra_index_urls:
+ self._downloaders.append(SimpleIndexDownloader(url))
+ super().__init__(("", port), PipkinProxyHandler)
+
+ def get_downloader_for_dist(self, dist_name: str) -> Optional[BaseIndexDownloader]:
+ if dist_name not in self._downloaders_by_dist_name:
+ for downloader in self._downloaders:
+ file_urls = downloader.get_file_urls(dist_name)
+ if file_urls is not None:
+ self._downloaders_by_dist_name[dist_name] = downloader
+ break
+ else:
+ self._downloaders_by_dist_name[dist_name] = None
+
+ return self._downloaders_by_dist_name[dist_name]
+
+ def get_index_url(self) -> str:
+ return f"http://127.0.0.1:{self.server_port}"
+
+
+class PipkinProxyHandler(BaseHTTPRequestHandler):
+ def __init__(self, request: bytes, client_address: Tuple[str, int], server: BaseServer):
+ logger.debug("Creating new handler")
+ assert isinstance(server, PipkinProxy)
+ self.proxy: PipkinProxy = server
+ super().__init__(request, client_address, server)
+
+ def do_GET(self) -> None:
+ path = self.path.strip("/")
+ logger.debug("do_GET for %s", path)
+ if "/" in path:
+ assert path.count("/") == 1
+ self._serve_file(*path.split("/"))
+ else:
+ self._serve_distribution_page(path)
+
+ def _serve_distribution_page(self, dist_name: str) -> None:
+ logger.debug("Serving index page for %s", dist_name)
+ downloader = self.proxy.get_downloader_for_dist(dist_name)
+ if downloader is None:
+ self.send_response(404)
+ self.end_headers()
+ return
+
+ # TODO: check https://discuss.python.org/t/community-testing-of-packaging-tools-against-non-warehouse-indexes/13442
+
+ file_urls = downloader.get_file_urls(dist_name)
+ # logger.debug("File urls: %r", file_urls)
+ self.send_response(200)
+ self.send_header("Content-type", f"text/html; charset={SERVER_ENCODING}")
+ self.send_header("Cache-Control", "max-age=600, public")
+ self.end_headers()
+ self.wfile.write("\n".encode(SERVER_ENCODING))
+ for file_name in file_urls:
+ self.wfile.write(
+ f"{file_name}\n".encode(SERVER_ENCODING)
+ )
+ self.wfile.write("".encode(SERVER_ENCODING))
+
+ def _serve_file(self, dist_name: str, file_name: str):
+ logger.debug("Serving %s for %s", file_name, dist_name)
+
+ original_bytes = self._download_file(dist_name, file_name)
+ tweaked_bytes = self._tweak_file(dist_name, file_name, original_bytes)
+
+ self.send_response(200)
+ self.send_header("Content-Type", "application/octet-stream")
+ self.send_header("Cache-Control", "max-age=365000000, immutable, public")
+ self.end_headers()
+
+ block_size = 4096
+ for start_index in range(0, len(tweaked_bytes), block_size):
+ block = tweaked_bytes[start_index : start_index + block_size]
+ self.wfile.write(block)
+
+ def _download_file(self, dist_name: str, file_name: str) -> bytes:
+ downloader = self.proxy.get_downloader_for_dist(dist_name)
+ assert downloader is not None
+
+ urls = downloader.get_file_urls(dist_name)
+ assert urls
+
+ assert file_name in urls
+ url = urls[file_name]
+ logger.debug("Downloading file from %s", url)
+ with urlopen(url) as result:
+ logger.debug("Headers: %r", result.headers.items())
+ return result.read()
+
+ def _tweak_file(self, dist_name: str, file_name: str, original_bytes: bytes) -> bytes:
+ if not file_name.lower().endswith(".tar.gz"):
+ return original_bytes
+
+        # In case of upip packages (tar.gz-s without setup.py), reverse the following process:
+        # https://github.com/micropython/micropython-lib/commit/3a6ab0b
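+        #
+        # For example, a hypothetical upip-optimized archive might contain only
+        #   micropython-os-0.6/os/__init__.py
+        #   micropython-os-0.6/micropython_os.egg-info/PKG-INFO
+        #   micropython-os-0.6/micropython_os.egg-info/requires.txt
+        # The code below re-adds setup.py, setup.cfg, dependency_links.txt and
+        # top_level.txt so that pip can process the sdist.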
+
+ in_tar = tarfile.open(fileobj=io.BytesIO(original_bytes), mode="r:gz")
+ out_buffer = io.BytesIO()
+ out_tar = tarfile.open(fileobj=out_buffer, mode="w:gz")
+
+ wrapper_dir = None
+ py_modules = []
+ packages = []
+ metadata_bytes = None
+ requirements = []
+ egg_info_path = None
+
+ for info in in_tar:
+ logger.debug("Processing %r (name:%r, isfile:%r)", info, info.name, info.isfile())
+ out_info = copy.copy(info)
+
+ if info.isdir():
+ content = None
+ else:
+ with in_tar.extractfile(info) as f:
+ content = f.read()
+
+ if "/" in info.name:
+ wrapper_dir, rel_name = info.name.split("/", maxsplit=1)
+ else:
+ assert info.isdir()
+ wrapper_dir, rel_name = info.name, ""
+
+ assert wrapper_dir.startswith(dist_name)
+
+ rel_name = rel_name.strip("/")
+ rel_segments = rel_name.split("/")
+
+ # collect information about the original tar
+ if rel_name == "setup.py":
+ logger.debug("The archive contains setup.py. No tweaks needed")
+ return original_bytes
+ elif ".egg-info" in rel_name:
+ if rel_name.endswith(".egg-info/PKG-INFO"):
+ egg_info_path = rel_name[: -len("/PKG-INFO")]
+ metadata_bytes = content
+ elif rel_name.endswith(".egg-info/requires.txt"):
+ requirements = content.decode("utf-8").strip().splitlines()
+ elif len(rel_segments) == 1:
+ # toplevel item outside of egg-info
+ if info.isfile() and rel_name.endswith(".py"):
+ # toplevel module
+ module_name = rel_name[: -len(".py")]
+ py_modules.append(module_name)
+ else:
+ if info.isdir():
+ # Assuming all toplevel directories represent packages.
+ packages.append(rel_name)
+ else:
+                    # Assuming an item inside a subdirectory.
+                    # If it's a .py file, it will be included together with its containing
+                    # package, otherwise it will be picked up by the package_data wildcard.
+ if rel_segments[0] not in packages:
+ # directories may not have their own entry
+ packages.append(rel_segments[0])
+
+            # all existing files and dirs need to be added unchanged
+ out_tar.addfile(out_info, io.BytesIO(content))
+
+ assert wrapper_dir
+ assert metadata_bytes
+
+ logger.debug("%s is optimized for upip. Re-constructing missing files", file_name)
+ logger.debug("py_modules: %r", py_modules)
+ logger.debug("packages: %r", packages)
+ logger.debug("requirements: %r", requirements)
+ metadata = self._parse_metadata(metadata_bytes)
+ logger.debug("metadata: %r", metadata)
+ setup_py = self._create_setup_py(metadata, py_modules, packages, requirements)
+ logger.debug("setup.py: %s", setup_py)
+
+ self._add_file_to_tar(wrapper_dir + "/setup.py", setup_py.encode("utf-8"), out_tar)
+ self._add_file_to_tar(wrapper_dir + "/PKG-INFO", metadata_bytes, out_tar)
+ self._add_file_to_tar(
+ wrapper_dir + "/setup.cfg",
+ b"""[egg_info]
+tag_build =
+tag_date = 0
+""",
+ out_tar,
+ )
+ self._add_file_to_tar(
+ wrapper_dir + "/" + egg_info_path + "/dependency_links.txt", b"\n", out_tar
+ )
+ self._add_file_to_tar(
+ wrapper_dir + "/" + egg_info_path + "/top_level.txt",
+ ("\n".join(packages + py_modules) + "\n").encode("utf-8"),
+ out_tar,
+ )
+
+ # TODO: recreate SOURCES.txt and test with data files
+
+ out_tar.close()
+
+ out_bytes = out_buffer.getvalue()
+
+ # with open("_temp.tar.gz", "wb") as fp:
+ # fp.write(out_bytes)
+
+ return out_bytes
+
+ def _add_file_to_tar(self, name: str, content: bytes, tar: tarfile.TarFile) -> None:
+ stream = io.BytesIO(content)
+ info = tarfile.TarInfo(name=name)
+ info.size = len(content)
+ tar.addfile(info, stream)
+
+ def _parse_metadata(self, metadata_bytes) -> Dict[str, str]:
+        metadata = email.parser.Parser().parsestr(metadata_bytes.decode("utf-8"))
+        # omit missing headers, so that _create_setup_py's membership test works
+        return {
+            key: metadata.get(key)
+            for key in (
+                "Metadata-Version",
+                "Name",
+                "Version",
+                "Summary",
+                "Home-page",
+                "Author",
+                "Author-email",
+                "License",
+            )
+            if metadata.get(key) is not None
+        }
+
+ def _create_setup_py(
+ self,
+ metadata: Dict[str, str],
+ py_modules: List[str],
+ packages: List[str],
+ requirements: List[str],
+    ) -> str:
+ src = dedent(
+ """
+ from setuptools import setup
+ setup (
+ """
+ ).lstrip()
+
+ for src_key, target_key in [
+ ("Name", "name"),
+ ("Version", "version"),
+ ("Summary", "description"),
+ ("Home-page", "url"),
+ ("Author", "author"),
+ ("Author-email", "author_email"),
+ ("License", "license"),
+ ]:
+ if src_key in metadata:
+ src += f" {target_key}={metadata[src_key]!r},\n"
+
+ if requirements:
+ src += f" install_requires={requirements!r},\n"
+
+ if py_modules:
+ src += f" py_modules={py_modules!r},\n"
+
+ if packages:
+ src += f" packages={packages!r},\n"
+
+ # include all other files as package data
+ src += " package_data={'*': ['*', '*/*', '*/*/*', '*/*/*/*', '*/*/*/*/*', '*/*/*/*/*/*', '*/*/*/*/*/*/*', '*/*/*/*/*/*/*/*']}\n"
+
+ src += ")\n"
+ return src
+
+
+def start_proxy(
+ no_mp_org: bool,
+ index_url: Optional[str],
+ extra_index_urls: List[str],
+) -> PipkinProxy:
+ try:
+ proxy = PipkinProxy(no_mp_org, index_url, extra_index_urls, PREFERRED_PORT)
+ except OSError as e:
+ if e.errno == errno.EADDRINUSE:
+ proxy = PipkinProxy(no_mp_org, index_url, extra_index_urls, 0)
+ else:
+ raise e
+
+ server_thread = threading.Thread(target=proxy.serve_forever)
+ server_thread.start()
+
+ return proxy
diff --git a/thonny/vendored_libs/pipkin/serial_connection.py b/thonny/vendored_libs/pipkin/serial_connection.py
new file mode 100644
index 000000000..205cc7287
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/serial_connection.py
@@ -0,0 +1,177 @@
+import pathlib
+import sys
+import threading
+import time
+from logging import getLogger
+from textwrap import dedent
+
+from .connection import ConnectionFailedException, MicroPythonConnection
+
+OUTPUT_ENQ = b"\x05"
+OUTPUT_ACK = b"\x06"
+NORMAL_PROMPT = b">>> "
+FIRST_RAW_PROMPT = b"raw REPL; CTRL-B to exit\r\n>"
+
+
+logger = getLogger(__name__)
+
+
+class SerialConnection(MicroPythonConnection):
+ def __init__(self, port, baudrate=115200, dtr=None, rts=None, skip_reader=False):
+
+ import serial
+ from serial.serialutil import SerialException
+
+ super().__init__()
+
+ try:
+ self._serial = serial.Serial(
+ port=None, baudrate=baudrate, timeout=None, write_timeout=2, exclusive=True
+ )
+ # Tweaking dtr and rts was proposed by
+ # https://github.com/thonny/thonny/pull/1187
+ # but in some cases it messes up communication.
+ # At the same time, in some cases it is required
+ # https://github.com/thonny/thonny/issues/1462
+ if dtr is not None:
+ logger.debug("Setting DTR to %s", dtr)
+ self._serial.dtr = dtr
+ if rts is not None:
+ logger.debug("Setting RTS to %s", rts)
+ self._serial.rts = rts
+
+ self._serial.port = port
+ logger.debug("Opening serial port %s", port)
+ self._serial.open()
+ except SerialException as error:
+ err_str = str(error)
+ if "FileNotFoundError" in err_str:
+ err_str = "port not found"
+ message = "Unable to connect to " + port + ": " + err_str
+
+ # TODO: check if these error codes also apply to Linux and Mac
+ if error.errno == 13 and sys.platform == "linux":
+ try:
+ group = pathlib.Path(self._serial.port).group()
+ except Exception:
+ logger.warning("Could not query group for '%s'", self._serial.port)
+ group = "dialoutfb"
+
+ # TODO: check if user already has this group
+ message += "\n\n" + dedent(
+ """\
+ Try adding yourself to the '{group}' group:
+                    > sudo usermod -a -G {group} <username>
+ (NB! You may need to reboot your system after this!)""".format(
+ group=group
+ )
+ )
+
+ elif "PermissionError" in message or "Could not exclusively lock" in message:
+ message += "\n\n" + dedent(
+ """\
+                If you have a serial connection to the device in another program, disconnect it there first."""
+ )
+
+ elif error.errno == 16:
+ message += "\n\n" + "Try restarting the device."
+
+ raise ConnectionFailedException(message) from error
+
+ if skip_reader:
+ self._reading_thread = None
+ else:
+ self._reading_thread = threading.Thread(target=self._listen_serial, daemon=True)
+ self._reading_thread.start()
+
+ def write(self, data: bytes) -> int:
+ size = self._serial.write(data)
+ # print(data.decode(), end="")
+ assert size == len(data)
+ return len(data)
+
+ def _listen_serial(self):
+ "NB! works in background thread"
+ try:
+ data = b""
+ while not self._reader_stopped:
+ data += self._serial.read(1) # To avoid busy loop
+ if len(data) == 0:
+ self._error = "EOF"
+ # print("LISTEN EOFFFFFFFFFF")
+ break
+ data += self._serial.read_all()
+ # logger.debug("GOT %r", data)
+
+ if data.endswith(OUTPUT_ENQ) and self.text_mode:
+ # Flow control.
+ logger.debug("Read ENQ, responding with ACK")
+ # Assuming connection is idle and it is safe to write in this thread
+ self._serial.write(OUTPUT_ACK)
+ self._serial.flush()
+ data = data[:-1]
+ continue
+
+ # don't publish incomplete utf-8 data
+ try:
+                    if self.text_mode:
+                        data.decode("utf-8")  # testing if data decodes
+                    to_be_published = data
+                    data = b""
+ except UnicodeDecodeError as e:
+ if e.start == 0:
+                        # Invalid start byte, i.e. we have missed the first byte(s) of the codepoint.
+                        # No use waiting; output everything.
+ to_be_published = data
+ data = b""
+ else:
+ to_be_published = data[: e.start]
+ data = data[e.start :]
+
+ if to_be_published:
+ self._make_output_available(to_be_published)
+
+ except Exception as e:
+ self._error = str(e)
+
+ def incoming_is_empty(self):
+ return self._serial.in_waiting == 0 and super().incoming_is_empty()
+
+ def outgoing_is_empty(self):
+ return self._serial.out_waiting == 0
+
+ def close(self):
+ if self._serial is not None:
+ try:
+ self._serial.cancel_read()
+ if self._reading_thread:
+ self._reading_thread.join()
+ finally:
+ try:
+ self._serial.close()
+ self._serial = None
+ except Exception:
+ logger.exception("Couldn't close serial")
+
+
+class DifficultSerialConnection(SerialConnection):
+ """For hardening the communication protocol"""
+
+ def _make_output_available(self, data, block=True):
+
+ # output prompts in parts
+ if FIRST_RAW_PROMPT in data or NORMAL_PROMPT in data:
+ if FIRST_RAW_PROMPT in data:
+ start = data.find(FIRST_RAW_PROMPT)
+ end = start + len(FIRST_RAW_PROMPT)
+ else:
+ start = data.find(NORMAL_PROMPT)
+ end = start + len(NORMAL_PROMPT)
+
+ super()._make_output_available(data[: start + 1], block=block)
+ time.sleep(0.1)
+ super()._make_output_available(data[start + 1 : end - 1], block=block)
+ time.sleep(0.1)
+ super()._make_output_available(data[end - 1 :], block=block)
+ else:
+ super()._make_output_available(data, block=block)
diff --git a/thonny/vendored_libs/pipkin/session.py b/thonny/vendored_libs/pipkin/session.py
new file mode 100644
index 000000000..3012e4b4c
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/session.py
@@ -0,0 +1,714 @@
+import hashlib
+import json
+import os.path
+import platform
+import shlex
+import shutil
+import stat
+import subprocess
+import sys
+import urllib.request
+import venv
+from dataclasses import dataclass
+from logging import getLogger
+from typing import Dict, List, Optional, Set, Tuple
+from urllib.request import urlopen
+
+import filelock
+from filelock import BaseFileLock, FileLock
+from pipkin.adapters import Adapter
+from pipkin.common import UserError
+from pipkin.proxy import start_proxy
+from pipkin.util import (
+ get_base_executable,
+ get_user_cache_dir,
+ get_venv_executable,
+ get_venv_site_packages_path,
+ parse_meta_dir_name,
+)
+
+logger = getLogger(__name__)
+
+INITIAL_VENV_DISTS = ["pip", "setuptools", "pkg_resources", "wheel"]
+INITIAL_VENV_FILES = ["easy_install.py"]
+META_ENCODING = "utf-8"
+
+
+@dataclass(frozen=True)
+class DistInfo:
+ key: str
+ project_name: str
+ version: str
+ location: str
+
+
+class Session:
+ """
+    Allows performing several commands in a row without releasing the venv.
+ """
+
+ def __init__(self, adapter: Adapter):
+ self._adapter = adapter
+ self._venv_lock: Optional[BaseFileLock] = None
+ self._venv_dir: Optional[str] = None
+ self._quiet = False
+
+ def install(
+ self,
+ specs: Optional[List[str]] = None,
+ requirement_files: Optional[List[str]] = None,
+ constraint_files: Optional[List[str]] = None,
+ pre: bool = False,
+ no_deps: bool = False,
+ no_mp_org: bool = False,
+ index_url: Optional[str] = None,
+ extra_index_urls: Optional[List[str]] = None,
+ no_index: bool = False,
+ find_links: Optional[str] = None,
+ target: Optional[str] = None,
+ user: bool = False,
+ upgrade: bool = False,
+ upgrade_strategy: str = "only-if-needed",
+ force_reinstall: bool = False,
+ compile: Optional[bool] = None,
+ mpy_cross: Optional[str] = None,
+ **_,
+ ):
+
+ if compile is None and mpy_cross:
+ compile = True
+
+ args = ["install", "--no-compile", "--use-pep517"]
+
+ if upgrade:
+ args.append("--upgrade")
+ if upgrade_strategy:
+ args += ["--upgrade-strategy", upgrade_strategy]
+ if force_reinstall:
+ args.append("--force-reinstall")
+
+ args += self._format_selection_args(
+ specs=specs,
+ requirement_files=requirement_files,
+ constraint_files=constraint_files,
+ pre=pre,
+ no_deps=no_deps,
+ )
+
+ self._populate_venv()
+ state_before = self._get_venv_state()
+ self._invoke_pip_with_index_args(
+ args,
+ no_mp_org=no_mp_org,
+ index_url=index_url,
+ extra_index_urls=extra_index_urls or [],
+ no_index=no_index,
+ find_links=find_links,
+ )
+ state_after = self._get_venv_state()
+
+ removed_meta_dirs = {name for name in state_before if name not in state_after}
+ assert not removed_meta_dirs
+
+ new_meta_dirs = {name for name in state_after if name not in state_before}
+ changed_meta_dirs = {
+ name
+ for name in state_after
+ if name in state_before and state_after[name] != state_before[name]
+ }
+
+ if new_meta_dirs or changed_meta_dirs:
+ self._report_progress("Starting to apply changes to the target.")
+
+ if target:
+ effective_target = target
+ elif user:
+ effective_target = self._adapter.get_user_packages_path()
+ else:
+ effective_target = self._adapter.get_default_target()
+
+ for meta_dir in changed_meta_dirs:
+ self._report_progress(f"Removing old version of {parse_meta_dir_name(meta_dir)[0]}")
+ # if target is specified by --target or --user, then don't touch anything
+ # besides corresponding directory, regardless of the sys.path and possible hiding
+ dist_name, _version = parse_meta_dir_name(meta_dir)
+ if target:
+ # pip doesn't remove old dist with --target unless --upgrade is given
+ if upgrade:
+ self._adapter.remove_dist(dist_name=dist_name, target=target)
+ elif user:
+ self._adapter.remove_dist(
+ dist_name=dist_name, target=self._adapter.get_user_packages_path()
+ )
+ else:
+                    # remove all installations of this dist, which would hide the new installation
+ self._adapter.remove_dist(
+ dist_name=dist_name, target=effective_target, above_target=True
+ )
+
+ for meta_dir in new_meta_dirs | changed_meta_dirs:
+ self._upload_dist_by_meta_dir(meta_dir, effective_target, compile, mpy_cross)
+
+ if new_meta_dirs or changed_meta_dirs:
+ self._report_progress("All changes applied.")
+
+ def uninstall(
+ self,
+ packages: Optional[List[str]] = None,
+ requirement_files: Optional[List[str]] = None,
+ yes: bool = False,
+ **_,
+ ):
+ args = ["uninstall"]
+ if yes:
+ args += ["--yes"]
+
+ for rf in requirement_files or []:
+ args += ["-r", rf]
+ for package in packages or []:
+ args.append(package)
+
+ self._populate_venv()
+ state_before = self._get_venv_state()
+ self._invoke_pip(args)
+ state_after = self._get_venv_state()
+
+ removed_meta_dirs = {name for name in state_before if name not in state_after}
+ if removed_meta_dirs:
+ self._report_progress("Starting to apply changes to the target.")
+
+ for meta_dir_name in removed_meta_dirs:
+ self._report_progress(f"Removing {parse_meta_dir_name(meta_dir_name)[0]}")
+ dist_name, _version = parse_meta_dir_name(meta_dir_name)
+ self._adapter.remove_dist(dist_name)
+
+ if removed_meta_dirs:
+ self._report_progress("All changes applied.")
+
+ def list(
+ self,
+ outdated: bool = False,
+ uptodate: bool = False,
+ not_required: bool = False,
+ pre: bool = False,
+ paths: Optional[List[str]] = None,
+ user: bool = False,
+ format: str = "columns",
+ no_mp_org: Optional[bool] = False,
+ index_url: Optional[str] = None,
+ extra_index_urls: Optional[List[str]] = None,
+ no_index: bool = False,
+ find_links: Optional[str] = None,
+ excludes: Optional[List[str]] = None,
+ **_,
+ ):
+
+ args = ["list"]
+
+ if outdated:
+ args.append("--outdated")
+ if uptodate:
+ args.append("--uptodate")
+ if not_required:
+ args.append("--not-required")
+ if pre:
+ args.append("--pre")
+ if format:
+ args += ["--format", format]
+
+ args += self._format_exclusion_args(excludes)
+
+ self._populate_venv(paths=paths, user=user)
+
+ self._invoke_pip_with_index_args(
+ args,
+ no_mp_org=no_mp_org,
+ index_url=index_url,
+ extra_index_urls=extra_index_urls,
+ no_index=no_index,
+ find_links=find_links,
+ )
+
+ def basic_list(self) -> Set[DistInfo]:
+ """
+ Allows listing without requiring the venv.
+ """
+ dists_by_name = self._adapter.list_dists()
+ result = set()
+ for name in dists_by_name:
+ meta_dir_name, location = dists_by_name[name]
+ name, version = parse_meta_dir_name(meta_dir_name)
+ result.add(DistInfo(key=name, project_name=name, version=version, location=location))
+
+ return result
+
+ def show(self, packages: List[str], **_):
+ self._populate_venv()
+ self._invoke_pip(["show"] + packages)
+
+ def freeze(
+ self,
+ paths: Optional[List[str]] = None,
+ user: bool = False,
+ excludes: Optional[List[str]] = None,
+ **_,
+ ):
+
+ args = ["freeze"]
+
+ args += self._format_exclusion_args(excludes)
+
+ self._populate_venv(paths=paths, user=user)
+ self._invoke_pip(args)
+
+ def check(self, **_):
+ self._populate_venv()
+ self._invoke_pip(["check"])
+
+ def download(
+ self,
+ specs: Optional[List[str]] = None,
+ requirement_files: Optional[List[str]] = None,
+ constraint_files: Optional[List[str]] = None,
+ pre: bool = False,
+ no_deps: bool = False,
+ no_mp_org: bool = False,
+ index_url: Optional[str] = None,
+ extra_index_urls: Optional[List[str]] = None,
+ no_index: bool = False,
+ find_links: Optional[str] = None,
+ dest: Optional[str] = None,
+ **_,
+ ):
+ args = ["download"]
+
+ if dest:
+ args += ["--dest", dest]
+
+ args += self._format_selection_args(
+ specs=specs,
+ requirement_files=requirement_files,
+ constraint_files=constraint_files,
+ pre=pre,
+ no_deps=no_deps,
+ )
+
+ self._populate_venv()
+ self._invoke_pip_with_index_args(
+ args,
+ no_mp_org=no_mp_org,
+ index_url=index_url,
+ extra_index_urls=extra_index_urls,
+ no_index=no_index,
+ find_links=find_links,
+ )
+
+ def wheel(
+ self,
+ specs: Optional[List[str]] = None,
+ requirement_files: Optional[List[str]] = None,
+ constraint_files: Optional[List[str]] = None,
+ pre: bool = False,
+ no_deps: bool = False,
+ no_mp_org: bool = False,
+ index_url: Optional[str] = None,
+ extra_index_urls: Optional[List[str]] = None,
+ no_index: bool = False,
+ find_links: Optional[str] = None,
+ wheel_dir: Optional[str] = None,
+ **_,
+ ):
+ args = ["wheel"]
+
+ if wheel_dir:
+ args += ["--wheel-dir", wheel_dir]
+
+ args += self._format_selection_args(
+ specs=specs,
+ requirement_files=requirement_files,
+ constraint_files=constraint_files,
+ pre=pre,
+ no_deps=no_deps,
+ )
+
+ self._populate_venv()
+ self._invoke_pip_with_index_args(
+ args,
+ no_mp_org=no_mp_org,
+ index_url=index_url,
+ extra_index_urls=extra_index_urls,
+ no_index=no_index,
+ find_links=find_links,
+ )
+
+ def cache(self, cache_command: str, **_) -> None:
+ if cache_command == "purge":
+ if os.path.exists(self._get_pipkin_cache_dir()):
+ shutil.rmtree(self._get_pipkin_cache_dir())
+ elif cache_command == "dir":
+ print(self._get_pipkin_cache_dir())
+ else:
+ self._invoke_pip(["cache", cache_command])
+
+ def close(self) -> None:
+ if self._venv_lock is not None:
+ # self._clear_venv()
+ self._venv_lock.release()
+
+ def _format_exclusion_args(self, excludes: Optional[List[str]]) -> List[str]:
+ args = []
+ for exclude in (excludes or []) + ["pip", "pkg_resources", "setuptools", "wheel"]:
+ args += ["--exclude", exclude]
+
+ return args
+
+ def _format_selection_args(
+ self,
+ specs: Optional[List[str]],
+ requirement_files: Optional[List[str]],
+ constraint_files: Optional[List[str]],
+ pre: bool,
+ no_deps: bool,
+ ):
+ args = []
+
+ for path in requirement_files or []:
+ args += ["-r", path]
+ for path in constraint_files or []:
+ args += ["-c", path]
+
+ if no_deps:
+ args.append("--no-deps")
+ if pre:
+ args.append("--pre")
+
+ args += specs or []
+
+ return args
+
+ def _upload_dist_by_meta_dir(
+ self, meta_dir_name: str, target: str, compile: bool, mpy_cross: Optional[str]
+ ) -> None:
+ self._report_progress(f"Copying {parse_meta_dir_name(meta_dir_name)[0]}", end="")
+ rel_record_path = os.path.join(meta_dir_name, "RECORD")
+ record_path = os.path.join(self._get_venv_site_packages_path(), rel_record_path)
+ assert os.path.exists(record_path)
+
+ target_record_lines = []
+
+ with open(record_path, encoding=META_ENCODING) as fp:
+ record_lines = fp.read().splitlines()
+
+ for line in record_lines:
+ rel_path = line.split(",")[0]
+ # don't consider files installed to e.g. bin-directory
+ if rel_path.startswith(".."):
+ continue
+
+ # don't consider absolute paths
+ if os.path.isabs(rel_path):
+ logger.warning("Skipping absolute path %s", rel_path)
+ continue
+
+ # only consider METADATA from meta dir
+ if rel_path.startswith(meta_dir_name) and os.path.basename(rel_path) != "METADATA":
+ continue
+
+ full_path = os.path.normpath(
+ os.path.join(self._get_venv_site_packages_path(), rel_path)
+ )
+
+ full_device_path = self._adapter.join_path(target, rel_path)
+
+ if full_path.endswith(".py") and compile:
+ self._compile_with_mpy_cross(
+ full_path, self._get_compiled_path(full_path), mpy_cross
+ )
+ # forget about the .py file
+ full_path = self._get_compiled_path(full_path)
+ full_device_path = self._get_compiled_path(full_device_path)
+ rel_path = self._get_compiled_path(rel_path)
+
+ with open(full_path, "rb") as source_fp:
+ content = source_fp.read()
+
+ if rel_path.startswith(meta_dir_name) and os.path.basename(rel_path) == "METADATA":
+ content = self._trim_metadata(content)
+
+ self._adapter.write_file(full_device_path, content)
+ self._report_progress(".", end="")
+ target_record_lines.append(self._adapter.normpath(rel_path) + ",,")
+
+ # add RECORD (without hashes)
+ target_record_lines.append(self._adapter.normpath(rel_record_path) + ",,")
+ full_device_record_path = self._adapter.join_path(target, rel_record_path)
+ self._adapter.write_file(
+ full_device_record_path, "\n".join(target_record_lines).encode(META_ENCODING)
+ )
+
+ # add linebreak for the report
+ self._report_progress("")
+
+ def _trim_metadata(self, content: bytes) -> bytes:
+ # TODO:
+ return content
+
+ def _get_compiled_path(self, source_path: str) -> str:
+ assert source_path.endswith(".py"), f"Source path: {source_path}"
+ return source_path[: -len(".py")] + ".mpy"
+
+ def _ensure_venv(self) -> None:
+ if self._venv_lock is not None:
+ return
+
+ self._venv_lock, self._venv_dir = self._prepare_venv()
+
+ def _prepare_venv(self) -> Tuple[BaseFileLock, str]:
+ # 1. create sample venv (if it doesn't exist yet)
+ # 2. clone the venv for this session (Too slow in Windows ???)
+ # https://github.com/edwardgeorge/virtualenv-clone/blob/master/clonevirtualenv.py
+ path = self._compute_venv_path()
+ if not os.path.exists(path):
+ logger.info("Start preparing working environment at %s ...", path)
+ venv.main([path])
+ subprocess.check_call(
+ [
+ get_venv_executable(path),
+ "-I",
+ "-m",
+ "pip",
+ "--disable-pip-version-check",
+ "install",
+ "--no-warn-script-location",
+ "--upgrade",
+ "pip==22.0.*",
+ "setuptools==60.9.*",
+ "wheel==0.37.*",
+ ]
+ )
+ logger.info("Done preparing working environment.")
+ else:
+ logger.debug("Using existing working environment at %s", path)
+
+ lock = FileLock(os.path.join(path, "pipkin.lock"))
+ try:
+ lock.acquire(timeout=0)
+ except filelock.Timeout:
+ raise UserError(
+ "Could not get exclusive access to the working environment. "
+ "Is there another pipkin instance running?"
+ )
+
+ return lock, path
+
+ def _get_venv_site_packages_path(self) -> str:
+ return get_venv_site_packages_path(self._venv_dir)
+
+ def _clear_venv(self) -> None:
+ sp_path = self._get_venv_site_packages_path()
+ for name in os.listdir(sp_path):
+ full_path = os.path.join(sp_path, name)
+ if self._is_initial_venv_item(name):
+ continue
+ elif os.path.isfile(full_path):
+ os.remove(full_path)
+ else:
+ assert os.path.isdir(full_path)
+ shutil.rmtree(full_path)
+
+ def _populate_venv(self, paths: Optional[List[str]] = None, user: bool = False) -> None:
+ """paths and user should be used only with list and freeze commands"""
+ self._ensure_venv()
+ # TODO: try to re-use the state from the previous command executed in the same session.
+ assert not (paths and user)
+ if user:
+ effective_paths = [self._adapter.get_user_packages_path()]
+ else:
+ effective_paths = paths
+ self._clear_venv()
+ dist_infos = self._adapter.list_dists(effective_paths)
+ for name in dist_infos:
+ meta_dir_name, original_path = dist_infos[name]
+ self._prepare_dummy_dist(meta_dir_name, original_path)
+
+ def _prepare_dummy_dist(self, meta_dir_name: str, original_path: str) -> None:
+ sp_path = self._get_venv_site_packages_path()
+ meta_path = os.path.join(sp_path, meta_dir_name)
+ os.mkdir(meta_path, 0o755)
+
+ for name in ["METADATA"]:
+ content = self._read_dist_meta_file(meta_dir_name, name, original_path)
+ with open(os.path.join(meta_path, name), "bw") as meta_fp:
+ meta_fp.write(content)
+
+ # INSTALLER is mandatory according to https://www.python.org/dev/peps/pep-0376/
+ with open(os.path.join(meta_path, "INSTALLER"), "w", encoding="utf-8") as installer_fp:
+ installer_fp.write("pip\n")
+
+ # create dummy RECORD
+ with open(os.path.join(meta_path, "RECORD"), "w", encoding=META_ENCODING) as record_fp:
+ for name in ["METADATA", "INSTALLER", "RECORD"]:
+ record_fp.write(f"{meta_dir_name}/{name},,\n")
+
+ def _read_dist_meta_file(
+ self, meta_dir_name: str, file_name: str, original_container_path: str
+ ) -> bytes:
+ # TODO: add cache
+ path = self._adapter.join_path(original_container_path, meta_dir_name, file_name)
+ return self._adapter.read_file(path)
+
+ def _compute_venv_path(self) -> str:
+ try:
+            # try to share the pip-execution-venv among all pipkin-running-venvs
+            # created from the same base executable
+ exe = get_base_executable()
+ except Exception:
+ exe = sys.executable
+
+ venv_name = hashlib.md5(str((exe, sys.version_info[0:2])).encode("utf-8")).hexdigest()
+ return os.path.join(self._get_workspaces_dir(), venv_name)
+
+ def _get_workspaces_dir(self) -> str:
+ return os.path.join(self._get_pipkin_cache_dir(), "workspaces")
+
+ def _get_pipkin_cache_dir(self) -> str:
+ result = os.path.join(get_user_cache_dir(), "pipkin")
+ if sys.platform == "win32":
+            # Windows doesn't have a separate user cache dir
+ result = os.path.join(result, "cache")
+ return result
+
+ def _is_initial_venv_item(self, name: str) -> bool:
+        return (
+            name in INITIAL_VENV_FILES
+            or name in INITIAL_VENV_DISTS
+            or (name.endswith(".dist-info") and name.split("-")[0] in INITIAL_VENV_DISTS)
+        )
+
+    def _get_venv_state(self, root: Optional[str] = None) -> Dict[str, float]:
+ """Returns mapping from meta_dir names to modification timestamps of METADATA files"""
+ if root is None:
+ root = self._get_venv_site_packages_path()
+
+ result = {}
+ for item_name in os.listdir(root):
+ if self._is_initial_venv_item(item_name):
+ continue
+
+ if item_name.endswith(".dist-info"):
+ metadata_full_path = os.path.join(root, item_name, "METADATA")
+ assert os.path.exists(metadata_full_path)
+ result[item_name] = os.stat(metadata_full_path).st_mtime
+
+ return result
+
+ def _invoke_pip_with_index_args(
+ self,
+ pip_args: List[str],
+ no_mp_org: bool,
+        index_url: Optional[str],
+        extra_index_urls: Optional[List[str]],
+ no_index: bool,
+ find_links: Optional[str],
+ ):
+
+ if no_index:
+ assert find_links
+ self._invoke_pip(pip_args + ["--no-index", "--find-links", find_links])
+ else:
+            proxy = start_proxy(no_mp_org, index_url, extra_index_urls or [])
+ logger.info("Using PipkinProxy at %s", proxy.get_index_url())
+
+ index_args = ["--index-url", proxy.get_index_url()]
+ if find_links:
+ index_args += ["--find-links", find_links]
+
+ try:
+ self._invoke_pip(pip_args + index_args)
+ finally:
+ proxy.shutdown()
+
+ def _invoke_pip(self, args: List[str]) -> None:
+ pip_cmd = [
+ get_venv_executable(self._venv_dir),
+ "-I",
+ "-m",
+ "pip",
+ "--disable-pip-version-check",
+ "--trusted-host",
+ "127.0.0.1",
+ ] + args
+ logger.debug("Calling pip: %s", " ".join(shlex.quote(arg) for arg in pip_cmd))
+
+ env = {key: os.environ[key] for key in os.environ if not key.startswith("PIP_")}
+ env["PIP_CACHE_DIR"] = self._get_pipkin_cache_dir()
+
+ subprocess.check_call(pip_cmd, env=env)
+
+ def _compile_with_mpy_cross(
+ self, source_path: str, target_path: str, mpy_cross_path: Optional[str]
+ ) -> None:
+ if mpy_cross_path is None:
+ mpy_cross_path = self._ensure_mpy_cross()
+
+ # user-provided executable is assumed to have been validated with proper error messages in main()
+        assert os.path.exists(mpy_cross_path)
+ assert os.access(mpy_cross_path, os.X_OK)
+ args = (
+ [mpy_cross_path] + self._adapter.get_mpy_cross_args() + ["-o", target_path, source_path]
+ )
+ subprocess.check_call(args)
+
+ def _ensure_mpy_cross(self) -> str:
+ impl_name, ver_prefix = self._adapter.get_implementation_name_and_version_prefix()
+ path = self._get_mpy_cross_path(impl_name, ver_prefix)
+ if not os.path.exists(path):
+ self._download_mpy_cross(impl_name, ver_prefix, path)
+ return path
+
+ def _download_mpy_cross(
+ self, implementation_name: str, version_prefix: str, target_path: str
+ ) -> None:
+ os.makedirs(os.path.dirname(target_path), exist_ok=True)
+ meta_url = f"https://raw.githubusercontent.com/aivarannamaa/pipkin/master/data/{implementation_name}-mpy-cross.json"
+ with urlopen(url=meta_url) as fp:
+ meta = json.load(fp)
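+ # Illustrative (hypothetical) shape of the downloaded meta file:
+ # {"1.19": {"linux-x86_64": "https://.../mpy-cross-...", ...}, ...}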
+
+ if version_prefix not in meta:
+ raise UserError(f"Can't find mpy-cross for {implementation_name} {version_prefix}")
+
+ version_data = meta[version_prefix]
+
+ if sys.platform == "win32":
+ os_marker = "windows"
+ elif sys.platform == "darwin":
+ os_marker = "macos"
+ elif sys.platform == "linux":
+ os_marker = "linux"
+ else:
+ raise AssertionError(f"Unexpected sys.platform {sys.platform}")
+
+ full_marker = f"{os_marker}-{platform.machine()}"
+
+ if full_marker not in version_data:
+ raise UserError(
+ f"Can't find {full_marker} mpy-cross for {implementation_name} {version_prefix}"
+ )
+
+ download_url = version_data[full_marker]
+
+ urllib.request.urlretrieve(download_url, target_path)
+ os.chmod(target_path, os.stat(target_path).st_mode | stat.S_IEXEC)
+
+ def _get_mpy_cross_path(self, implementation_name: str, version_prefix: str) -> str:
+ basename = f"mpy-cross_{implementation_name}_{version_prefix}"
+ if sys.platform == "win32":
+ basename += ".exe"
+
+ return os.path.join(self._get_pipkin_cache_dir(), "mpy-cross", basename)
+
+ def _report_progress(self, msg: str, end="\n") -> None:
+ if not self._quiet:
+ print(msg, end=end)
+ sys.stdout.flush()
diff --git a/thonny/vendored_libs/pipkin/util.py b/thonny/vendored_libs/pipkin/util.py
new file mode 100644
index 000000000..644eaf6e8
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/util.py
@@ -0,0 +1,81 @@
+import os.path
+import subprocess
+import sys
+from typing import Tuple
+
+
+def get_windows_folder(ID: int) -> str:
+ # http://stackoverflow.com/a/3859336/261181
+ # http://www.installmate.com/support/im9/using/symbols/functions/csidls.htm
+ if sys.platform == "win32":
+ import ctypes.wintypes
+
+ SHGFP_TYPE_CURRENT = 0
+ buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
+ ctypes.windll.shell32.SHGetFolderPathW(0, ID, 0, SHGFP_TYPE_CURRENT, buf)
+ assert buf.value
+ return buf.value
+ else:
+ raise AssertionError("Meant to be used only on Windows")
+
+
+def get_windows_roaming_appdata_dir() -> str:
+ return get_windows_folder(26) # CSIDL_APPDATA
+
+
+def get_windows_local_appdata_dir() -> str:
+ return get_windows_folder(28) # CSIDL_LOCAL_APPDATA
+
+
+def get_user_cache_dir() -> str:
+ if sys.platform == "win32":
+ return get_windows_local_appdata_dir()
+ elif sys.platform == "darwin":
+ return os.path.expanduser("~/Library/Caches")
+ else:
+ return os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
+
+
+def get_base_executable() -> str:
+ # Inside a venv, sys.exec_prefix differs from sys.base_exec_prefix
+ if sys.exec_prefix == sys.base_exec_prefix:
+ return sys.executable
+
+ if sys.platform == "win32":
+ guess = sys.base_exec_prefix + "\\" + os.path.basename(sys.executable)
+ if os.path.isfile(guess):
+ return guess
+
+ if os.path.islink(sys.executable):
+ return os.path.realpath(sys.executable)
+
+ raise RuntimeError("Don't know how to locate base executable")
+
+
+def get_venv_executable(path: str) -> str:
+ if sys.platform == "win32":
+ return os.path.join(path, "Scripts", "python.exe")
+ else:
+ return os.path.join(path, "bin", "python3")
+
+
+def get_venv_site_packages_path(venv_path: str) -> str:
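+ # Asks the venv's own interpreter; illustrative result on POSIX:
+ # "<venv_path>/lib/python3.X/site-packages"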
+ result = subprocess.check_output(
+ [get_venv_executable(venv_path), "-c", "import site; print(site.getsitepackages()[0])"],
+ text=True,
+ ).strip()
+ assert result.startswith(venv_path)
+ return result
+
+
+def parse_meta_dir_name(name: str) -> Tuple[str, str]:
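+ # e.g. (illustrative) "micropython_os-0.6.dist-info" -> ("micropython_os", "0.6")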
+ assert name.endswith(".dist-info")
+ name, version = name[: -len(".dist-info")].split("-")
+ return name, version
+
+
+def starts_with_continuation_byte(data: bytes) -> bool:
+ return len(data) > 0 and is_continuation_byte(data[0])
+
+
+def is_continuation_byte(byte: int) -> bool:
+ return (byte & 0b11000000) == 0b10000000
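+
+
+# Illustrative behavior (added comment, not part of the original module):
+# "é" encodes to b"\xc3\xa9" in UTF-8; 0xC3 is a lead byte (0b110xxxxx) and
+# 0xA9 a continuation byte (0b10xxxxxx), so:
+# is_continuation_byte(0xC3) -> False
+# is_continuation_byte(0xA9) -> True
+# starts_with_continuation_byte(b"\xa9") -> True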
diff --git a/thonny/vendored_libs/pipkin/webrepl_connection.py b/thonny/vendored_libs/pipkin/webrepl_connection.py
new file mode 100644
index 000000000..cec4b71e9
--- /dev/null
+++ b/thonny/vendored_libs/pipkin/webrepl_connection.py
@@ -0,0 +1,144 @@
+import sys
+import threading
+from logging import DEBUG, getLogger
+from queue import Queue
+
+from .connection import ConnectionFailedException, MicroPythonConnection
+
+logger = getLogger(__name__)
+
+
+class WebReplConnection(MicroPythonConnection):
+ """
+ Problem with block size:
+ https://github.com/micropython/micropython/issues/2497
+ Start with conservative delay.
+ Client may later reduce it for better efficiency
+ """
+
+ def __init__(self, url, password, num_bytes_received=0):
+ self.num_bytes_received = num_bytes_received
+ super().__init__()
+
+ try:
+ import websockets # @UnusedImport
+ except Exception:
+ print(
+ "Can't import `websockets`. You can install it via 'Tools => Manage plug-ins'.",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+ self._url = url
+ self._password = password
+ self._write_responses = Queue()
+
+ # Some tricks are needed for using an async library in a sync program:
+ # thread-safe queues are used to communicate with the async world running in another thread
+ self._write_queue = Queue()
+ self._connection_result = Queue()
+ self._ws_thread = threading.Thread(target=self._wrap_ws_main, daemon=True)
+ self._ws_thread.start()
+
+ # Wait until the connection has been made
+ res = self._connection_result.get()
+ if res != "OK":
+ raise res
+
+ def _wrap_ws_main(self):
+ import asyncio
+
+ loop = asyncio.new_event_loop()
+ if logger.isEnabledFor(DEBUG):
+ loop.set_debug(True)
+ loop.run_until_complete(self._ws_main())
+
+ async def _ws_main(self):
+ import asyncio
+
+ try:
+ await self._ws_connect()
+ except Exception as e:
+ self._connection_result.put_nowait(e)
+ return
+
+ self._connection_result.put_nowait("OK")
+ await asyncio.gather(self._ws_keep_reading(), self._ws_keep_writing())
+
+ async def _ws_connect(self):
+ import websockets.exceptions
+
+ try:
+ try:
+ self._ws = await websockets.connect(self._url, ping_interval=None)
+ except websockets.exceptions.InvalidMessage:
+ # try once more
+ self._ws = await websockets.connect(self._url, ping_interval=None)
+ except OSError as e:
+ # print("\nCould not connect:", e, file=sys.stderr)
+ raise ConnectionFailedException(str(e)) from e
+ logger.debug("GOT WS: %r", self._ws)
+
+ # read password prompt and send password
+ read_chars = ""
+ logger.debug("Looking for password prompt")
+ while read_chars != "Password: ":
+ ch = await self._ws.recv()
+ read_chars += ch
+
+ logger.debug("Submitting password")
+ await self._ws.send(self._password + "\n")
+
+ async def _ws_keep_reading(self):
+ import websockets.exceptions
+
+ while not self._reader_stopped:
+ try:
+ data = await self._ws.recv()
+ if isinstance(data, str):
+ data = data.encode("UTF-8")
+ if len(data) == 0:
+ self._error = "EOF"
+ break
+ except websockets.exceptions.ConnectionClosedError:
+ # TODO: try to reconnect in case of Ctrl+D
+ self._error = "EOF"
+ break
+
+ self.num_bytes_received += len(data)
+ self._make_output_available(data, block=False)
+
+ async def _ws_keep_writing(self):
+ import asyncio
+
+ while True:
+ while not self._write_queue.empty():
+ data = self._write_queue.get(block=False)
+ if self.text_mode:
+ payload = data.decode("UTF-8")
+ else:
+ payload = data
+ await self._ws.send(payload)
+ # logger.debug("Wrote %r bytes", len(data))
+ self._write_responses.put(len(data))
+
+ # Allow reading loop to progress
+ await asyncio.sleep(0.01)
+
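+ # A sync caller blocks until the async writer has sent the payload and
+ # reported the byte count; illustrative (hypothetical) usage:
+ # n = conn.write(b"import os\r\n")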
+ def write(self, data: bytes) -> int:
+ self._write_queue.put_nowait(data)
+ return self._write_responses.get()
+
+ async def _async_close(self):
+ await self._ws.close()
+
+ def close_and_return_new_connection(self):
+ self.close()
+ return WebReplConnection(self._url, self._password, self.num_bytes_received)
+
+ def close(self):
+ """
+ # TODO:
+ import asyncio
+ asyncio.get_event_loop().run_until_complete(self._async_close())
+ """