diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml
index 3995084e..30cd34c5 100644
--- a/.github/workflows/dist.yml
+++ b/.github/workflows/dist.yml
@@ -50,14 +50,14 @@ jobs:
       with:
         submodules: true
 
-    - uses: actions/setup-python@v4
-      with:
-        python-version: 3.8
-    - name: Sphinx
-      run: |
-        pip --disable-pip-version-check install -e .
-        pip --disable-pip-version-check install -r docs/requirements.txt
-        cd docs && make clean html SPHINXOPTS="-W --keep-going"
+  #   - uses: actions/setup-python@v5
+  #     with:
+  #       python-version: 3.8
+  #   - name: Sphinx
+  #     run: |
+  #       pip --disable-pip-version-check install -e .
+  #       pip --disable-pip-version-check install -r docs/requirements.txt
+  #       cd docs && make clean html SPHINXOPTS="-W --keep-going"
 
   build:
     runs-on: ubuntu-22.04
@@ -67,7 +67,7 @@ jobs:
         submodules: recursive
         fetch-depth: 0
 
-    - uses: actions/setup-python@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: 3.8
 
@@ -97,12 +97,12 @@ jobs:
         - '3.11'
         - '3.12'
         - '3.13'
-        architecture: [x86, x64]
-        exclude:
-        - os: macos-13
-          architecture: x86
-        - os: ubuntu-22.04
-          architecture: x86
+        architecture: [x64]
+        # exclude:
+        # - os: macos-13
+        #   architecture: x86
+        # - os: ubuntu-22.04
+        #   architecture: x86
         include:
         - os: macos-14
           python_version: 3.9
@@ -116,7 +116,7 @@ jobs:
       with:
         submodules: true
 
-    - uses: actions/setup-python@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python_version }}
         architecture: ${{ matrix.architecture }}
@@ -133,25 +133,32 @@ jobs:
           key: ${{ matrix.os }}-${{ matrix.architecture }}-${{ matrix.python_version }}
           variant: ccache
 
+    - name: Setup MSVC
+      uses: bus1/cabuild/action/msdevshell@e22aba57d6e74891d059d66501b6b5aed8123c4d  # v1
+      with:
+        architecture: x64
+      if: runner.os == 'Windows'
+
+    - name: Configure ccache for MSVC
+      shell: bash
+      # https://github.com/mesonbuild/meson/issues/10423
+      run: |
+        echo "CC=ccache.exe cl" >> $GITHUB_ENV
+        echo "CXX=ccache.exe cl" >> $GITHUB_ENV
+      if: runner.os == 'Windows'
+
     - name: Install
       shell: bash
       working-directory: dist
       run: python -m pip --disable-pip-version-check install *.whl
 
     - name: Install test dependencies
-      shell: bash
       working-directory: tests
       run: python -m pip --disable-pip-version-check install -r requirements.txt
 
     - name: Test wheel
-      shell: bash
-      env:
-        RPYBUILD_PARALLEL: 1
-        RPYBUILD_STRIP_LIBPYTHON: 1
-        RPYBUILD_CC_LAUNCHER: ccache
       working-directory: tests
-      run: |
-        python run_tests.py
+      run: python run_tests.py
 
   cross-build:
     runs-on: ubuntu-latest
@@ -160,9 +167,8 @@ jobs:
       max-parallel: ${{ fromJSON(needs.setup_concurrency.outputs.max-parallel).v }}
       matrix:
         container:
-        - wpilib/roborio-cross-ubuntu:2024-22.04-py312
-        - wpilib/raspbian-cross-ubuntu:bullseye-22.04-py312
-        - wpilib/aarch64-cross-ubuntu:bullseye-22.04-py312
+        - wpilib/roborio-cross-ubuntu:2025-22.04-py313
+        - wpilib/raspbian-cross-ubuntu:2025-bookworm-22.04-py313
 
     container:
       image: "${{ matrix.container }}"
@@ -184,33 +190,23 @@ jobs:
         name: dist
         path: dist
 
-    - name: Install setuptools
-      run: |
-        /build/venv/bin/build-pip --disable-pip-version-check install setuptools
-        /build/venv/bin/cross-pip --disable-pip-version-check install setuptools
+    - name: Setup cross environment
+      uses: robotpy/build-actions/setup-cross-meson@semiwrap
 
     - name: Install
       working-directory: dist
       run: |
-        # install to both cross and build in case things get mixed up
         /build/venv/bin/build-pip --disable-pip-version-check install *.whl
-        /build/venv/bin/cross-pip --disable-pip-version-check install *.whl
 
     - name: Install test dependencies
       shell: bash
       working-directory: tests
       run: |
-        # install to both cross and build in case things get mixed up
         /build/venv/bin/build-pip --disable-pip-version-check install -r requirements.txt
-        /build/venv/bin/cross-pip --disable-pip-version-check install -r requirements.txt
 
     - name: Build cross wheel
-      env:
-        RPYBUILD_PARALLEL: 1
-        RPYBUILD_STRIP_LIBPYTHON: 1
-        RPYBUILD_CC_LAUNCHER: ccache
       working-directory: tests/cpp
-      run: /build/venv/bin/cross-python run_install.py wheel
+      run: /build/venv/bin/cross-python run_install.py --config-settings=setup-args=--cross-file=cross.txt
 
   publish:
     runs-on: ubuntu-latest
diff --git a/.gitignore b/.gitignore
index cc00707f..a28df6f5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,4 +7,4 @@ __pycache__
 /dist
 /build
 
-/robotpy_build/version.py
\ No newline at end of file
+/src/semiwrap/version.py
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
index 8daf168a..5d872ea0 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
-[submodule "robotpy_build/pybind11"]
-	path = robotpy_build/pybind11
+[submodule "src/semiwrap/pybind11"]
+	path = src/semiwrap/pybind11
 	url = https://github.com/pybind/pybind11.git
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index 0396ccfa..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,2 +0,0 @@
-recursive-include robotpy_build/pybind11/include *.h
-recursive-include robotpy_build/include *.h
\ No newline at end of file
diff --git a/README.md b/README.md
index 27a4e659..d44537ae 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-robotpy-build
-=============
+semiwrap
+========
 
 This is a build tool intended to be generally useful for any python project
 that has binary dependencies. It is especially designed to meet the needs
@@ -14,13 +14,15 @@ Requires Python 3.8+
 Documentation
 -------------
 
-Documentation can be found at https://robotpy-build.readthedocs.io/
+Documentation can be found at https://semiwrap.readthedocs.io/
 
 Author
 ------
 
-Dustin Spicuzza is the primary author of robotpy-build, but it is the
+Dustin Spicuzza is the primary author of semiwrap.
+
+Semiwrap is a direct descendant of the robotpy-build project, and is the
 culmination of many years of experimentation with automated wrapper
 generation by members of the RobotPy community.
 
-robotpy-build is available under the BSD 3-clause license.
+semiwrap is available under the BSD 3-clause license.
diff --git a/docs/conf.py b/docs/conf.py
index c586ea3a..2c8c507e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -15,7 +15,7 @@
 
 os.environ["GENERATING_DOCUMENTATION"] = "True"
 
-__version__ = pkg_resources.get_distribution("robotpy-build").version
+__version__ = pkg_resources.get_distribution("semiwrap").version
 
 
 # -- RTD configuration ------------------------------------------------
@@ -25,7 +25,7 @@
 
 # -- Project information -----------------------------------------------------
 
-project = "robotpy-build"
+project = "semiwrap"
 copyright = "2020, RobotPy Development Team"
 author = "RobotPy Development Team"
 
diff --git a/examples/demo/gen/.gitkeep b/examples/demo/gen/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/examples/demo/meson.build b/examples/demo/meson.build
new file mode 100644
index 00000000..66e956b7
--- /dev/null
+++ b/examples/demo/meson.build
@@ -0,0 +1,23 @@
+project('demo', ['cpp'],
+        default_options: ['warning_level=1', 'cpp_std=c++20',
+                          'b_colorout=auto', 'optimization=2', 'b_pie=true'])
+
+# Include autogenerated wrapcfg/meson.build
+subdir('wrapcfg')
+
+# Add additional source files to predefined variable in wrapcfg/meson.build
+demo_sources += files(
+  'swdemo/src/demo.cpp',
+  'swdemo/src/main.cpp',
+)
+
+# You can add extra compilation arguments by adding a dependency to predefined
+# variable
+demo_deps += [
+  declare_dependency(include_directories: ['swdemo/include'])
+]
+
+# Include autogenerated wrapcfg/modules/meson.build
+# - Builds the extension modules
+# - Generates the pyi file for the extension modules
+subdir('wrapcfg/modules')
diff --git a/examples/demo/pyproject.toml b/examples/demo/pyproject.toml
index 6bdf7fc3..f1509443 100644
--- a/examples/demo/pyproject.toml
+++ b/examples/demo/pyproject.toml
@@ -1,41 +1,38 @@
 
-# This section tells pip to install robotpy-build before starting a build
+# This section tells pip to install semiwrap before starting a build
 [build-system]
-requires = ["robotpy-build"]
-
-# Tells robotpy-build where to place autogenerated metadata
-[tool.robotpy-build]
-base_package = "rpydemo"
-
-# This section configures the 'rpydemo' python package. Multiple
-# sections are possible to build multiple packages
-[tool.robotpy-build.wrappers."rpydemo"]
-name = "rpydemo"
-
-# C++ source files to compile, path is relative to the root of the project
-sources = [
-    "rpydemo/src/demo.cpp",
-    "rpydemo/src/main.cpp"
-]
-
-# This is a directory that can be used to customize the autogenerated
-# C++ code
-# -> autogenerate those files via `robotpy-build create-gen`
-generation_data = "gen"
-
-# This tells robotpy-build to parse include/demo.h and autogenerate pybind11
-# wrappers for the contents of the header.
-# -> autogenerate this via `robotpy-build scan-headers`
-[tool.robotpy-build.wrappers."rpydemo".autogen_headers]
-demo = "demo.h"
-
-
-# Standard python package metadata
-[tool.robotpy-build.metadata]
-name = "robotpy-build-demo"
-description = "robotpy-build demo program"
-author = "RobotPy Development Team"
-author_email = "robotpy@googlegroups.com"
-url = "https://github.com/robotpy/robotpy-build"
-license = "BSD-3-Clause"
-install_requires = []
\ No newline at end of file
+build-backend = "hatchling.build"
+requires = ["semiwrap", "hatch-meson", "hatchling"]
+
+[project]
+name = "swdemo"
+description = "Demo program"
+version = "0.0.1"
+
+#
+# hatch-semiwrap configuration
+# .. this generates meson.build to perform autogen
+#
+
+[tool.hatch.build.hooks.semiwrap]
+# autogen_build_path = "autogen"
+
+#
+# hatch-meson configuration
+# .. this executes meson to build python extension modules
+#
+
+[tool.hatch.build.hooks.meson]
+
+
+#
+# semiwrap code generation configuration
+#
+
+[tool.semiwrap]
+
+[tool.semiwrap.extension_modules."swdemo._demo"]
+name = "demo"
+
+[tool.semiwrap.extension_modules."swdemo._demo".headers]
+demo = "include/demo.h"
diff --git a/examples/demo/rpydemo/src/main.cpp b/examples/demo/rpydemo/src/main.cpp
deleted file mode 100644
index c7e6095e..00000000
--- a/examples/demo/rpydemo/src/main.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-#include <rpygen_wrapper.hpp>
-
-RPYBUILD_PYBIND11_MODULE(m) { initWrapper(m); }
\ No newline at end of file
diff --git a/examples/demo/setup.py b/examples/demo/setup.py
deleted file mode 100755
index ea1ae2b4..00000000
--- a/examples/demo/setup.py
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env python3
-from robotpy_build.setup import setup
-
-setup()
diff --git a/examples/demo/rpydemo/__init__.py b/examples/demo/swdemo/__init__.py
similarity index 73%
rename from examples/demo/rpydemo/__init__.py
rename to examples/demo/swdemo/__init__.py
index 0641bb9a..671b581f 100644
--- a/examples/demo/rpydemo/__init__.py
+++ b/examples/demo/swdemo/__init__.py
@@ -1,4 +1,4 @@
 # autogenerated by 'robotpy-build create-imports rpydemo rpydemo._rpydemo'
-from ._rpydemo import DemoClass, add2
+from ._demo import DemoClass, add2
 
 __all__ = ["DemoClass", "add2"]
diff --git a/examples/demo/rpydemo/include/demo.h b/examples/demo/swdemo/include/demo.h
similarity index 89%
rename from examples/demo/rpydemo/include/demo.h
rename to examples/demo/swdemo/include/demo.h
index 0f351c50..29968add 100644
--- a/examples/demo/rpydemo/include/demo.h
+++ b/examples/demo/swdemo/include/demo.h
@@ -4,6 +4,8 @@
 /** Adds 2 to the first parameter and returns it */
 int add2(int x);
 
+namespace demo {
+
 /**
     Doxygen documentation is automatically added to your python objects
     when the bindings are autogenerated.
@@ -20,3 +22,5 @@ class DemoClass {
 private:
     int m_x = 0;
 };
+
+} // namespace demo
diff --git a/examples/demo/rpydemo/src/demo.cpp b/examples/demo/swdemo/src/demo.cpp
similarity index 78%
rename from examples/demo/rpydemo/src/demo.cpp
rename to examples/demo/swdemo/src/demo.cpp
index 6b4279a5..fd670f31 100644
--- a/examples/demo/rpydemo/src/demo.cpp
+++ b/examples/demo/swdemo/src/demo.cpp
@@ -5,10 +5,14 @@ int add2(int x) {
     return x + 2;
 }
 
+namespace demo {
+
 void DemoClass::setX(int x) {
     m_x = x;
 }
 
 int DemoClass::getX() const {
     return m_x;
-}
\ No newline at end of file
+}
+
+} // namespace demo
diff --git a/examples/demo/swdemo/src/main.cpp b/examples/demo/swdemo/src/main.cpp
new file mode 100644
index 00000000..96af8010
--- /dev/null
+++ b/examples/demo/swdemo/src/main.cpp
@@ -0,0 +1,5 @@
+#include <semiwrap_init.swdemo._demo.hpp>
+
+SEMIWRAP_PYBIND11_MODULE(m) {
+    initWrapper(m);
+}
\ No newline at end of file
diff --git a/examples/demo/wrapcfg/.gitignore b/examples/demo/wrapcfg/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/examples/demo/wrapcfg/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/examples/demo/wrapcfg/demo.yml b/examples/demo/wrapcfg/demo.yml
new file mode 100644
index 00000000..1a632401
--- /dev/null
+++ b/examples/demo/wrapcfg/demo.yml
@@ -0,0 +1,9 @@
+---
+
+functions:
+  add2:
+classes:
+  demo::DemoClass:
+    methods:
+      setX:
+      getX:
diff --git a/examples/demo/wrapcfg/modules/.gitignore b/examples/demo/wrapcfg/modules/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/examples/demo/wrapcfg/modules/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/examples/demo/wrapcfg/trampolines/.gitignore b/examples/demo/wrapcfg/trampolines/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/examples/demo/wrapcfg/trampolines/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/pyproject.toml b/pyproject.toml
index 0e00c638..2b5dac13 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,13 +3,13 @@ requires = ["hatchling>=1.26.2", "hatch-vcs"]
 build-backend = "hatchling.build"
 
 [project]
-name = "robotpy-build"
+name = "semiwrap"
 dynamic = ["version"]
-description = "Build tool for RobotPy projects"
+description = "Semi-automated tool to wrap C++ code for use by python"
 readme = "README.md"
 requires-python = ">=3.8"
 license = "BSD-3-Clause"
-license-files = ["LICENSE.txt", "robotpy_build/pybind11/LICENSE"]
+license-files = ["LICENSE.txt", "src/semiwrap/pybind11/LICENSE"]
 authors = [
     {name = "Dustin Spicuzza", email = "robotpy@googlegroups.com"},
 ]
@@ -18,62 +18,53 @@ classifiers = [
     "Intended Audience :: Developers",
     "License :: OSI Approved :: BSD License",
     "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
-    "Programming Language :: Python :: 3.11",
     "Topic :: Software Development",
 ]
 dependencies = [
-    "setuptools >= 45",
-    "setuptools_scm >= 6.2, < 8",
+    "pkgconf",
     "sphinxify >= 0.7.3",
-    "pydantic >= 1.7.0, < 2, != 1.10.20",
-    "cxxheaderparser[pcpp] ~= 1.4.1",
+    "validobj ~= 1.2",
+    "cxxheaderparser[pcpp] ~= 1.5",
     "tomli",
     "tomli_w",
     "toposort",
     "typing-extensions",
+    "validobj",
     "pyyaml >= 5.1",
-    "patch == 1.*",
     "pybind11-stubgen ~= 2.5.1",
     "delocate; platform_system == 'Darwin'",
     "distro; platform_system == 'Linux'",
 ]
 
-[project.entry-points.robotpybuild]
-robotpy-build = "robotpy_build.pkgcfg"
+[project.entry-points.hatch]
+semiwrap = "semiwrap.hooks"
+
+[project.entry-points.pkg_config]
+semiwrap = "semiwrap"
 
 [project.scripts]
-robotpy-build = "robotpy_build.tool:main"
+semiwrap = "semiwrap.tool:main"
 
 [project.urls]
-"Source code" = "https://github.com/robotpy/robotpy-build"
+"Source code" = "https://github.com/robotpy/semiwrap"
 
 [tool.hatch.version]
 source = "vcs"
 
-[tool.hatch.build.hooks.vcs]
-version-file = "robotpy_build/version.py"
+[tool.hatch.build.targets.sdist.hooks.vcs]
+version-file = "src/semiwrap/version.py"
 
 [tool.hatch.build.targets.sdist]
-packages = ["robotpy_build"]
+packages = ["src/semiwrap"]
 exclude = [
-    "/robotpy_build/pybind11"
+    "/src/semiwrap/pybind11"
 ]
 [tool.hatch.build.targets.sdist.force-include]
-"./robotpy_build/pybind11/include" = "./robotpy_build/pybind11/include"
-
-[tool.hatch.build.targets.wheel]
-packages = ["robotpy_build"]
-include = [
-    "/robotpy_build/pybind11/include",
-    "/robotpy_build/include",
-]
+"./src/semiwrap/pybind11/include" = "./semiwrap/pybind11/include"
 
 
 [tool.black]
 target-version = ["py38"]
 extend-exclude = '''
-^/robotpy_build/pybind11
+^/src/semiwrap/pybind11
 '''
diff --git a/robotpy_build/command/_built_env.py b/robotpy_build/command/_built_env.py
deleted file mode 100644
index 91e8608d..00000000
--- a/robotpy_build/command/_built_env.py
+++ /dev/null
@@ -1,79 +0,0 @@
-import importlib.util
-import os
-from os.path import abspath, exists, dirname, join
-
-from setuptools import Command
-
-from .util import get_install_root
-
-
-class _BuiltEnv(Command):
-
-    user_options = [("build-lib=", "d", 'directory to "build" (copy) to')]
-
-    def initialize_options(self):
-        self.build_lib = None
-
-    def finalize_options(self):
-        self.set_undefined_options("build", ("build_lib", "build_lib"))
-
-    def setup_built_env(self):
-
-        # Gather information for n
-        data = {"mapping": {}}
-
-        # OSX-specific: need to set DYLD_LIBRARY_PATH otherwise modules don't
-        # work. Luckily, that information was computed when building the
-        # extensions...
-        env = os.environ.copy()
-        dyld_path = set()
-
-        # Requires information from build_ext to work
-        build_ext = self.get_finalized_command("build_ext")
-        if build_ext.inplace:
-            data["out"] = get_install_root(self)
-        else:
-            data["out"] = self.build_lib
-
-        # Ensure that the associated packages can always be found locally
-        for wrapper in build_ext.wrappers:
-            pkgdir = wrapper.package_name.split(".")
-            init_py = abspath(join(self.build_lib, *pkgdir, "__init__.py"))
-            if exists(init_py):
-                data["mapping"][wrapper.package_name] = init_py
-
-        # Ensure that the built extension can always be found
-        build_ext.resolve_libs()
-        for ext in build_ext.extensions:
-            fname = build_ext.get_ext_filename(ext.name)
-            data["mapping"][ext.name] = abspath(join(self.build_lib, fname))
-
-            rpybuild_libs = getattr(ext, "rpybuild_libs", None)
-            if rpybuild_libs:
-                for pth, _ in rpybuild_libs.values():
-                    dyld_path.add(dirname(pth))
-
-        # OSX-specific
-        if dyld_path:
-            dyld_path = ":".join(dyld_path)
-            if "DYLD_LIBRARY_PATH" in env:
-                dyld_path += ":" + env["DYLD_LIBRARY_PATH"]
-            env["DYLD_LIBRARY_PATH"] = dyld_path
-
-        return data, env
-
-
-class _PackageFinder:
-    """
-    Custom loader to allow loading built modules from their location
-    in the build directory (as opposed to their install location)
-    """
-
-    # Set this to mapping returned from _BuiltEnv.setup_built_env
-    mapping = {}
-
-    @classmethod
-    def find_spec(cls, fullname, path, target=None):
-        m = cls.mapping.get(fullname)
-        if m:
-            return importlib.util.spec_from_file_location(fullname, m)
diff --git a/robotpy_build/command/build_dl.py b/robotpy_build/command/build_dl.py
deleted file mode 100644
index 10f35f5a..00000000
--- a/robotpy_build/command/build_dl.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from distutils.core import Command
-from typing import List
-import os
-import os.path
-import subprocess
-import sys
-import sysconfig
-
-from ..platforms import get_platform
-from ..static_libs import StaticLib
-from ..wrapper import Wrapper
-from .util import get_install_root
-
-
-debug = os.environ.get("RPYBUILD_DEBUG") == "1"
-
-
-class BuildDl(Command):
-    command_name = "build_dl"
-    description = "Downloads files"
-    user_options = [
-        ("build-base=", "b", "base directory for build library"),
-        ("build-temp=", "t", "temporary build directory"),
-        ("build-cache=", None, "build directory to cache downloaded objects"),
-        ("src-unpack-to=", None, "build directory to unpack sources to"),
-        ("lib-unpack-to=", None, "build directory to unpack static libs to"),
-    ]
-    wrappers: List[Wrapper] = []
-    static_libs: List[StaticLib] = []
-
-    def initialize_options(self):
-        self.build_base = None
-        self.build_cache = None
-        self.build_temp = None
-        self.src_unpack_to = None
-        self.lib_unpack_to = None
-
-    def finalize_options(self):
-        self.set_undefined_options(
-            "build", ("build_base", "build_base"), ("build_temp", "build_temp")
-        )
-        if self.build_cache is None:
-            self.build_cache = os.path.join(self.build_base, "cache")
-        if self.src_unpack_to is None:
-            self.src_unpack_to = os.path.join(self.build_temp, "dlsrc")
-        if self.lib_unpack_to is None:
-            self.lib_unpack_to = os.path.join(self.build_temp, "dlstatic")
-
-    def run(self):
-        all_libs = []
-
-        for lib in self.static_libs:
-            lib.on_build_dl(self.build_cache, self.lib_unpack_to)
-        for wrapper in self.wrappers:
-            all_libs += wrapper.on_build_dl(self.build_cache, self.src_unpack_to)
-
-        # On OSX, fix library loader paths for embedded libraries
-        # -> this happens here so that the libs are modified before build_py
-        #    copies them. Extensions are fixed after build
-        platform = get_platform()
-        if platform.os == "osx":
-            from ..relink_libs import relink_libs
-
-            install_root = get_install_root(self)
-            for wrapper in self.wrappers:
-                relink_libs(install_root, wrapper, self.rpybuild_pkgcfg)
-
-        elif not debug and platform.os == "linux":
-            # strip any downloaded libraries
-            strip_exe = "strip"
-            if getattr(sys, "cross_compiling", False):
-                # This is a hack, but the information doesn't seem to be available
-                # in other accessible ways
-                ar_exe = sysconfig.get_config_var("AR")
-                if ar_exe.endswith("-ar"):
-                    strip_exe = f"{ar_exe[:-3]}-strip"
-
-            for lib in all_libs:
-                print(strip_exe, lib)
-                subprocess.check_call([strip_exe, lib])
diff --git a/robotpy_build/command/build_ext.py b/robotpy_build/command/build_ext.py
deleted file mode 100644
index 135e3e21..00000000
--- a/robotpy_build/command/build_ext.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#
-# Portions copied from pybind11's setup_helpers.py
-#
-
-import os
-from os.path import join
-from setuptools.command.build_ext import build_ext
-import platform
-import setuptools
-import sys
-import sysconfig
-import tempfile
-
-from .util import get_install_root
-from ..platforms import get_platform
-
-# TODO: only works for GCC
-debug = os.environ.get("RPYBUILD_DEBUG") == "1"
-
-WIN = sys.platform.startswith("win32") and "mingw" not in sysconfig.get_platform()
-MACOS = sys.platform.startswith("darwin")
-STD_TMPL = "/std:c++{}" if WIN else "-std=c++{}"
-
-
-# As of Python 3.6, CCompiler has a `has_flag` method.
-# cf http://bugs.python.org/issue26689
-def has_flag(compiler, flagname):
-    """Return a boolean indicating whether a flag name is supported on
-    the specified compiler.
-    """
-    with tempfile.TemporaryDirectory() as tmpdir:
-        fname = join(tmpdir, "test.cpp")
-        with open(fname, "w") as fp:
-            fp.write("int main (int argc, char **argv) { return 0; }")
-        try:
-            compiler.compile([fname], output_dir=tmpdir, extra_postargs=[flagname])
-        except setuptools.distutils.errors.CompileError:
-            return False
-    return True
-
-
-def cxx_std(compiler) -> int:
-    """Return the -std=c++[11/14/17/20] compiler flag.
-    The newer version is prefered over c++11 (when it is available).
-    """
-
-    for level in (20, 17, 17, 11):
-        if has_flag(compiler, STD_TMPL.format(level)):
-            return level
-
-    raise RuntimeError("Unsupported compiler -- at least C++11 support is needed!")
-
-
-def get_opts(typ, std):
-    c_opts = {"msvc": ["/EHsc", "/bigobj"], "unix": []}
-    l_opts = {"msvc": [], "unix": []}
-
-    plat = get_platform()
-    if plat.os == "osx":
-        darwin_opts = ["-stdlib=libc++"]
-
-        if "MACOSX_DEPLOYMENT_TARGET" not in os.environ:
-            # C++17 requires a higher min version of macOS. An earlier version
-            # (10.12 or 10.13) can be set manually via environment variable if
-            # you are careful in your feature usage, but 10.14 is the safest
-            # setting for general use. However, never set higher than the
-            # current macOS version!
-            current_macos = tuple(int(x) for x in platform.mac_ver()[0].split(".")[:2])
-            if std == 20:
-                desired_macos = (10, 15)
-            elif std == 17:
-                desired_macos = (10, 14)
-            else:
-                desired_macos = (10, 9)
-
-            macos_string = ".".join(str(x) for x in min(current_macos, desired_macos))
-            darwin_opts.append(f"-mmacosx-version-min={macos_string}")
-
-        c_opts["unix"] += darwin_opts
-        l_opts["unix"] += darwin_opts + ["-headerpad_max_install_names"]
-
-    return c_opts.get(typ, []), l_opts.get(typ, [])
-
-
-class BuildExt(build_ext):
-    """A custom build extension for adding compiler-specific options."""
-
-    def build_extensions(self):
-        ct = self.compiler.compiler_type
-        std = cxx_std(self.compiler)
-        opts, link_opts = get_opts(ct, std)
-
-        # To support ccache on windows
-        cc_launcher = os.environ.get("RPYBUILD_CC_LAUNCHER")
-
-        if ct == "unix":
-            opts.append("-s")  # strip
-            if debug:
-                opts.append("-ggdb3")
-                opts.append("-UNDEBUG")
-            else:
-                opts.append("-g0")  # remove debug symbols
-            opts.append(STD_TMPL.format(std))
-            if has_flag(self.compiler, "-fvisibility=hidden"):
-                opts.append("-fvisibility=hidden")
-
-            if cc_launcher:
-                self.compiler.compiler.insert(0, cc_launcher)
-                self.compiler.compiler_so.insert(0, cc_launcher)
-                try:
-                    # setuptools v72.2.0 added C++ support
-                    self.compiler.compiler_so_cxx.insert(0, cc_launcher)
-                except AttributeError:
-                    pass
-                # compiler_cxx is only used for linking, so we don't mess with it
-                # .. distutils is so weird
-                # self.compiler.compiler_cxx.insert(0, cc_launcher)
-        elif ct == "msvc":
-            opts.append(STD_TMPL.format(std))
-            opts.append("/Zc:__cplusplus")
-            # Enable standards-compliant preprocessor
-            if has_flag(self.compiler, "/Zc:preprocessor"):
-                opts.append("/Zc:preprocessor")
-            # Enable utf-8 source files.. this probably shouldn't be set globally
-            if has_flag(self.compiler, "/utf-8"):
-                opts.append("/utf-8")
-            if cc_launcher:
-                # yes, this is terrible. There's really no other way with distutils
-                def _spawn(cmd):
-                    if cmd[0] == self.compiler.cc:
-                        cmd.insert(0, cc_launcher)
-                    self.compiler._rpy_spawn(cmd)
-
-                self.compiler._rpy_spawn = self.compiler.spawn
-                self.compiler.spawn = _spawn
-        for ext in self.extensions:
-            if debug:
-                ext.define_macros.append(
-                    ("PYBIND11_ASSERT_GIL_HELD_INCREF_DECREF", "1")
-                )
-            ext.extra_compile_args = opts
-            ext.extra_link_args = link_opts
-
-        # self._gather_global_includes()
-
-        build_ext.build_extensions(self)
-
-        # Fix Libraries on macOS
-        # Uses @loader_path, is compatible with macOS >= 10.4
-        platform = get_platform()
-        if platform.os == "osx":
-            from ..relink_libs import relink_extension
-
-            install_root = get_install_root(self)
-
-            for ext in self.extensions:
-                libs = relink_extension(
-                    install_root,
-                    self.get_ext_fullpath(ext.name),
-                    self.get_ext_filename(ext.name),
-                    ext.rpybuild_wrapper,
-                    self.rpybuild_pkgcfg,
-                )
-
-                # Used in build_pyi
-                ext.rpybuild_libs = libs
-
-    def resolve_libs(self):
-        # used in _built_env
-        platform = get_platform()
-        if platform.os == "osx":
-            for wrapper in self.wrappers:
-                wrapper.finalize_extension()
-
-            from ..relink_libs import resolve_libs
-
-            install_root = get_install_root(self)
-
-            for ext in self.extensions:
-                libs = resolve_libs(
-                    install_root,
-                    ext.rpybuild_wrapper,
-                    self.rpybuild_pkgcfg,
-                )
-
-                # Used in build_pyi
-                ext.rpybuild_libs = libs
-
-    def run(self):
-        # files need to be generated before building can occur
-        self.run_command("build_gen")
-
-        for wrapper in self.wrappers:
-            wrapper.finalize_extension()
-
-        build_ext.run(self)
-
-        # pyi can only be built after ext is built
-        self.run_command("build_pyi")
-
-    def get_libraries(self, ext):
-        libraries = build_ext.get_libraries(self, ext)
-
-        if (
-            sys.platform != "win32"
-            and os.environ.get("RPYBUILD_STRIP_LIBPYTHON") == "1"
-        ):
-            pythonlib = "python{}.{}".format(
-                sys.hexversion >> 24, (sys.hexversion >> 16) & 0xFF
-            )
-            libraries = [lib for lib in libraries if not lib.startswith(pythonlib)]
-
-        return libraries
-
-
-parallel = int(os.environ.get("RPYBUILD_PARALLEL", "0"))
-if parallel > 0:
-    # don't enable this hack by default, because not really sure of the
-    # ramifications -- however, it's really useful for development
-    #
-    # .. the real answer to this is cmake o_O
-
-    # monkey-patch for parallel compilation
-    # -> https://stackoverflow.com/questions/11013851/speeding-up-build-process-with-distutils/13176803#13176803
-    def parallelCCompile(
-        self,
-        sources,
-        output_dir=None,
-        macros=None,
-        include_dirs=None,
-        debug=0,
-        extra_preargs=None,
-        extra_postargs=None,
-        depends=None,
-    ):
-        # those lines are copied from distutils.ccompiler.CCompiler directly
-        macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
-            output_dir, macros, include_dirs, sources, depends, extra_postargs
-        )
-        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
-        # parallel code
-        import multiprocessing
-        import multiprocessing.pool
-
-        N = multiprocessing.cpu_count() if parallel == 1 else parallel
-
-        def _single_compile(obj):
-            try:
-                src, ext = build[obj]
-            except KeyError:
-                return
-            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
-
-        for _ in multiprocessing.pool.ThreadPool(N).imap(_single_compile, objects):
-            pass
-        return objects
-
-    import distutils.ccompiler
-
-    distutils.ccompiler.CCompiler.compile = parallelCCompile
diff --git a/robotpy_build/command/build_gen.py b/robotpy_build/command/build_gen.py
deleted file mode 100644
index 04efea87..00000000
--- a/robotpy_build/command/build_gen.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from distutils.core import Command
-from typing import List
-import os.path
-
-from ..wrapper import Wrapper
-
-
-class BuildGen(Command):
-    command_name = "build_gen"
-    description = "Generates source files"
-    user_options = [
-        ("build-base=", "b", "base directory for build library"),
-        ("build-temp=", "t", "temporary build directory"),
-        ("cxx-gen-dir=", "b", "Directory to write generated C++ files"),
-    ]
-    wrappers: List[Wrapper] = []
-
-    def initialize_options(self):
-        self.build_base = None
-        self.build_temp = None
-        self.cxx_gen_dir = None
-
-    def finalize_options(self):
-        self.set_undefined_options(
-            "build", ("build_base", "build_base"), ("build_temp", "build_temp")
-        )
-        if self.cxx_gen_dir is None:
-            self.cxx_gen_dir = os.path.join(self.build_temp, "gensrc")
-
-    def run(self):
-        # files need to be downloaded before building can occur
-        self.run_command("build_dl")
-
-        for wrapper in self.wrappers:
-            wrapper.on_build_gen(self.cxx_gen_dir)
diff --git a/robotpy_build/command/build_py.py b/robotpy_build/command/build_py.py
deleted file mode 100644
index ab9234ce..00000000
--- a/robotpy_build/command/build_py.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from typing import List
-
-from setuptools.command.build_py import build_py
-
-from ..wrapper import Wrapper
-
-
-class BuildPy(build_py):
-    wrappers: List[Wrapper] = []
-
-    def run(self):
-        # files need to be generated before building can occur
-        # -> otherwise they're not included in the bdist
-        self.run_command("build_gen")
-
-        # Add the generated files to the package data
-        for package, _, _, filenames in self.data_files:
-            for wrapper in self.wrappers:
-                if wrapper.package_name == package:
-                    filenames.extend(wrapper.additional_data_files)
-                    break
-
-        build_py.run(self)
diff --git a/robotpy_build/command/build_pyi.py b/robotpy_build/command/build_pyi.py
deleted file mode 100644
index 3eaaf3a1..00000000
--- a/robotpy_build/command/build_pyi.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import json
-import os
-from os.path import exists, dirname, join
-import subprocess
-import sys
-
-import pybind11_stubgen
-
-try:
-    from setuptools.errors import BaseError
-except ImportError:
-    from distutils.errors import DistutilsError as BaseError
-
-from ._built_env import _BuiltEnv, _PackageFinder
-
-
-class GeneratePyiError(BaseError):
-    pass
-
-
-class BuildPyi(_BuiltEnv):
-    base_package: str
-
-    command_name = "build_pyi"
-    description = "Generates pyi files from built extensions"
-
-    def run(self):
-        # cannot build pyi files when cross-compiling
-        if (
-            "_PYTHON_HOST_PLATFORM" in os.environ
-            or "PYTHON_CROSSENV" in os.environ
-            or os.environ.get("RPYBUILD_SKIP_PYI") == "1"
-        ):
-            return
-
-        # Gather information for needed stubs
-        data, env = self.setup_built_env()
-        data["stubs"] = []
-
-        # Ensure that the built extension can always be found
-        build_ext = self.get_finalized_command("build_ext")
-        for ext in build_ext.extensions:
-            data["stubs"].append(ext.name)
-
-        # Don't do anything if nothing is needed
-        if not data["stubs"]:
-            return
-
-        data_json = json.dumps(data)
-
-        # Execute in a subprocess in case it crashes
-        args = [sys.executable, "-m", __name__]
-        try:
-            subprocess.run(args, input=data_json.encode("utf-8"), env=env, check=True)
-        except subprocess.CalledProcessError:
-            raise GeneratePyiError(
-                "Failed to generate .pyi file (see above, or set RPYBUILD_SKIP_PYI=1 to ignore) via %s"
-                % (args,)
-            ) from None
-
-        # Create a py.typed for PEP 561
-        with open(join(data["out"], *self.base_package.split("."), "py.typed"), "w"):
-            pass
-
-
-def main():
-    cfg = json.load(sys.stdin)
-
-    # Configure custom loader
-    _PackageFinder.mapping = cfg["mapping"]
-    sys.meta_path.insert(0, _PackageFinder)
-
-    # Generate pyi modules
-    out = cfg["out"]
-    for stub in cfg["stubs"]:
-        sys.argv = [
-            "<dummy>",
-            "--exit-code",
-            "--ignore-invalid-expressions=<.*>",
-            "--root-suffix=",
-            "-o",
-            out,
-            stub,
-        ]
-
-        pybind11_stubgen.main()
-
-
-if __name__ == "__main__":
-    main()
diff --git a/robotpy_build/command/develop.py b/robotpy_build/command/develop.py
deleted file mode 100644
index 088428b0..00000000
--- a/robotpy_build/command/develop.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from os.path import abspath
-from setuptools.command.develop import develop
-
-
-class Develop(develop):
-    def run(self):
-        self.distribution.rpybuild_develop_path = abspath(self.egg_base)
-        develop.run(self)
-
-        # if not uninstall, perform fixups on OSX?
diff --git a/robotpy_build/command/editable_wheel.py b/robotpy_build/command/editable_wheel.py
deleted file mode 100644
index e0f85ff6..00000000
--- a/robotpy_build/command/editable_wheel.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from os.path import abspath
-from setuptools.command.editable_wheel import editable_wheel
-
-
-class EditableWheel(editable_wheel):
-    def run(self):
-        # you aren't supposed to do this, but... they broke my workflow
-        self.distribution.rpybuild_develop_path = abspath(self.project_dir)
-        editable_wheel.run(self)
diff --git a/robotpy_build/command/util.py b/robotpy_build/command/util.py
deleted file mode 100644
index 1dca29dd..00000000
--- a/robotpy_build/command/util.py
+++ /dev/null
@@ -1,18 +0,0 @@
-def get_install_root(cmd):
-    # hack
-    install_root = getattr(cmd.distribution, "rpybuild_develop_path", None)
-    if not install_root:
-        inst_command = cmd.distribution.get_command_obj("install")
-        inst_command.ensure_finalized()
-        install_root = inst_command.install_platlib
-
-    return install_root
-
-
-def get_build_temp_path():
-    import distutils.dist
-    import distutils.command.build
-
-    b = distutils.command.build.build(distutils.dist.Distribution())
-    b.finalize_options()
-    return b.build_temp
diff --git a/robotpy_build/config/dev_yml.py b/robotpy_build/config/dev_yml.py
deleted file mode 100644
index 15918759..00000000
--- a/robotpy_build/config/dev_yml.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import os
-from typing import Optional, List
-import yaml
-
-from .util import Model
-
-
-class DevConfig(Model):
-    """
-    Configuration options useful for developing robotpy-build wrappers.
-    To use these, set the environment variable RPYBUILD_GEN_FILTER=filename.yml
-    """
-
-    #: When set this will only generate new wrapping code for the specified
-    #: headers (left side of generate in pyproject.toml). Existing wrapping
-    #: code will not be deleted during a build.
-    #:
-    #: Useful in conjunction with ccache
-    only_generate: Optional[List[str]] = None
-
-
-def get_dev_config(name: str) -> Optional[DevConfig]:
-    # name is the wrapper config, not used currently
-    genfilter = os.environ.get("RPYBUILD_GEN_FILTER")
-    if genfilter:
-        with open(genfilter) as fp:
-            data = yaml.safe_load(fp)
-    else:
-        data = {}
-
-    return DevConfig(**data)
diff --git a/robotpy_build/config/pyproject_toml.py b/robotpy_build/config/pyproject_toml.py
deleted file mode 100644
index 67898589..00000000
--- a/robotpy_build/config/pyproject_toml.py
+++ /dev/null
@@ -1,441 +0,0 @@
-#
-# pyproject.toml
-#
-
-import re
-from typing import Dict, List, Optional
-
-from .util import Model
-
-_arch_re = re.compile(r"\{\{\s*ARCH\s*\}\}")
-_os_re = re.compile(r"\{\{\s*OS\s*\}\}")
-
-
-class PatchInfo(Model):
-    """
-    A unified diff to apply to downloaded source code before building a
-    a wrapper.
-
-    .. code-block:: toml
-
-       [[tool.robotpy-build.wrappers."MY.PACKAGE.NAME".maven_lib_download.patches]]
-       patch = "path/to/my.patch"
-       strip = 0
-    """
-
-    #: Name of patch file to apply
-    patch: str
-
-    #: Number of directories to strip
-    strip: int = 0
-
-
-class MavenLibDownload(Model):
-    """
-    Used to download artifacts from a maven repository. This can download
-    headers, shared libraries, and sources.
-
-    .. code-block:: toml
-
-       [tool.robotpy-build.wrappers."PACKAGENAME".maven_lib_download]
-       artifact_id = "mything"
-       group_id = "com.example.thing"
-       repo_url = "http://example.com/maven"
-       version = "1.2.3"
-
-    .. note:: For FIRST Robotics libraries, the information required can
-              be found in the vendor JSON file
-    """
-
-    #: Maven artifact ID
-    artifact_id: str
-
-    #: Maven group ID
-    group_id: str
-
-    #: Maven repository URL
-    repo_url: str
-
-    #: Version of artifact to download
-    version: str
-
-    #: Configure the sources classifier
-    sources_classifier: str = "sources"
-
-    #: When set, download sources instead of downloading libraries. When
-    #: using this, you need to manually add the sources to the configuration
-    #: to be compiled via :attr:`sources`.
-    use_sources: bool = False
-
-    # common with Download
-
-    #: Names of contained shared libraries (in loading order). If None,
-    #: set to artifact_id.
-    libs: Optional[List[str]] = None
-
-    #: Names of contained shared link only libraries (in loading order). If None,
-    #: set to name. If empty list, link only libs will not be downloaded.
-    dlopenlibs: Optional[List[str]] = None
-
-    #: Library extensions map
-    libexts: Dict[str, str] = {}
-
-    #: Compile time extensions map
-    linkexts: Dict[str, str] = {}
-
-    #: If :attr:`use_sources` is set, this is the list of sources to compile
-    sources: Optional[List[str]] = None
-
-    #: If :attr:`use_sources` is set, apply the following patches to the sources. Patches
-    #: must be in unified diff format.
-    patches: Optional[List[PatchInfo]] = None
-
-    #: Patches to downloaded header files. Patches must be in unified diff format.
-    header_patches: Optional[List[PatchInfo]] = None
-
-
-class Download(Model):
-    """
-    Download sources/libs/includes from a single file
-
-    .. code-block:: toml
-
-       [[tool.robotpy-build.wrappers."PACKAGENAME".download]]
-       url = "https://my/url/something.zip"
-       incdir = "include"
-       libs = ["mylib"]
-
-    """
-
-    #: URL of zipfile to download
-    #:
-    #: {{ARCH}} and {{OS}} are replaced with the architecture/os name
-    url: str
-
-    #: Directory that contains include files.
-    #:
-    #: {{ARCH}} and {{OS}} are replaced with the architecture/os name
-    incdir: Optional[str] = None
-
-    #: Directory that contains library files
-    #:
-    #: {{ARCH}} and {{OS}} are replaced with the architecture/os name
-    libdir: str = ""
-
-    #: Extra include paths, relative to the include directory
-    #:
-    #: {{ARCH}} and {{OS}} are replaced with the architecture/os name
-    extra_includes: List[str] = []
-
-    # Common with MavenLibDownload
-
-    #: If specified, names of contained shared libraries (in loading order)
-    libs: Optional[List[str]] = None
-
-    #: If specified, names of contained shared link only libraries (in loading order).
-    #: If None, set to name. If empty list, link only libs will not be downloaded.
-    dlopenlibs: Optional[List[str]] = None
-
-    #: Library extensions map
-    libexts: Dict[str, str] = {}
-
-    #: Compile time extensions map
-    linkexts: Dict[str, str] = {}
-
-    #: List of sources to compile
-    sources: Optional[List[str]] = None
-
-    #: If :attr:`sources` is set, apply the following patches to the sources. Patches
-    #: must be in unified diff format.
-    patches: Optional[List[PatchInfo]] = None
-
-    #: Patches to downloaded header files in incdir. Patches must be in unified
-    #: diff format.
-    header_patches: Optional[List[PatchInfo]] = None
-
-    def _update_with_platform(self, platform):
-        for n in ("url", "incdir", "libdir"):
-            v = getattr(self, n, None)
-            if v is not None:
-                v = _os_re.sub(platform.os, _arch_re.sub(platform.arch, v))
-                setattr(self, n, v)
-
-        if self.extra_includes:
-            self.extra_includes = [
-                _os_re.sub(platform.os, _arch_re.sub(platform.arch, v))
-                for v in self.extra_includes
-            ]
-
-
-class StaticLibConfig(Model):
-    """
-    Static libraries that can be consumed as a dependency by other wrappers
-    in the same project. Static libraries are not directly installed, and
-    as a result cannot be consumed by other projects.
-
-    .. code-block:: toml
-
-       [tool.robotpy-build.static_libs."MY.PACKAGE.NAME"]
-
-    """
-
-    #: If this project depends on external libraries stored in a maven repo
-    #: specify it here
-    maven_lib_download: Optional[MavenLibDownload] = None
-
-    #: If this project depends on external libraries downloadable from some URL
-    #: specify it here
-    download: Optional[List[Download]] = None
-
-    #: If True, skip this library; typically used in conjection with an override
-    ignore: bool = False
-
-
-class TypeCasterConfig(Model):
-    """
-    Specifies type casters that this package exports. robotpy-build
-    will attempt to detect these types at generation time and include
-    them in generated wrappers.
-
-    .. code-block:: toml
-
-       [[tool.robotpy-build.wrappers."PACKAGENAME".type_casters]]
-       header = "my_type_caster.h"
-       types = ["foo_t", "ns::ins::bar_t"]
-
-    .. seealso:: :ref:`type_casters`
-    """
-
-    #: Header file to include when one of the types are detected in a wrapper
-    header: str
-
-    #: Types to look for to indicate that this type caster header should be
-    #: included.
-    types: List[str]
-
-    #: If a parameter type that requires this type caster requires a default
-    #: argument, a C-style ``(type)`` cast is used on the default argument.
-    #:
-    #: The default cast can be disabled via param_override's ``disable_type_caster_default_cast``
-    default_arg_cast: bool = False
-
-
-class WrapperConfig(Model):
-    """
-    Configuration for building a C++ python extension module, optionally
-    using autogenerated wrappers around existing library code.
-
-    .. code-block:: toml
-
-       [tool.robotpy-build.wrappers."PACKAGENAME"]
-       name = "package_name"
-
-    The PACKAGENAME above is a python package (eg "example.package.name").
-    A robotpy-build project can contain many different wrappers and packages.
-    """
-
-    #: Name that other projects/wrappers use in their 'depends' list
-    name: str
-
-    #: Name of extension to build. If None, set to _{name}
-    extension: Optional[str] = None
-
-    #: Name of generated file that ensures the shared libraries and any
-    #: dependencies are loaded. Defaults to ``_init{extension}.py``
-    #:
-    #: Generally, you should create an ``__init__.py`` file that imports
-    #: this module, otherwise your users will need to do so.
-    libinit: Optional[str] = None
-
-    #: List of robotpy-build library dependencies. This affects this wrapper
-    #: library in the following ways:
-    #:
-    #: * Any include file directories exported by the dependency will be added
-    #:   to the include path for any source files compiled by this wrapper
-    #: * It will be linked to any libraries the dependency contains
-    #: * The python module for the dependency will be imported in the
-    #:   ``_init{extension}.py`` file.
-    depends: List[str] = []
-
-    #: If this project depends on external libraries stored in a maven repo
-    #: specify it here.
-    maven_lib_download: Optional[MavenLibDownload] = None
-
-    #: If this project depends on external libraries downloadable from some URL
-    #: specify it here
-    download: Optional[List[Download]] = None
-
-    #: List of extra include directories to export, relative to the
-    #: project root.
-    extra_includes: List[str] = []
-
-    #: Optional source files to compile. Path is relative to the root of
-    #: the project.
-    sources: List[str] = []
-
-    #: Specifies header files that autogenerated pybind11 wrappers will be
-    #: created for. Simple C++ headers will most likely 'just work', but
-    #: complex headers will need to have an accompanying :attr:`generation_data`
-    #: file specified that can customize the autogenerated files.
-    #:
-    #: List of dictionaries: each dictionary key is used for the function
-    #: name of the initialization function, the value is the header that is
-    #: being wrapped. The header is first looked for relative to the
-    #: package, then relative to each include directory (including
-    #: downloaded and extracted packages).
-    #:
-    #: .. code-block:: toml
-    #:
-    #:    [tool.robotpy-build.wrappers."PACKAGENAME".autogen_headers]
-    #:    Name = "header.h"
-    #:
-    #: .. seealso:: :ref:`autowrap`
-    #:
-    autogen_headers: Optional[Dict[str, str]] = None
-
-    #: DEPRECATED: Same as autogen_headers, but more complicated
-    generate: Optional[List[Dict[str, str]]] = None
-
-    #: Path to a single data.yml to use during code generation, or a directory
-    #: of yaml files. If a directory, generation data will be looked up
-    #: using the key in the generate dictionary.
-    #:
-    #: These YAML files can be generated via the robotpy-build command line tool:
-    #:
-    #: .. code-block:: sh
-    #:
-    #:    robotpy-build create-gen --write
-    #:
-    #: .. seealso:: :ref:`gendata`
-    #:
-    generation_data: Optional[str] = None
-
-    #: Specifies type casters that this package exports.
-    type_casters: List[TypeCasterConfig] = []
-
-    #: Preprocessor definitions to apply when compiling this wrapper.
-    pp_defines: List[str] = []
-
-    #: If True, skip this wrapper; typically used in conjection with an override.
-    ignore: bool = False
-
-
-class DistutilsMetadata(Model):
-    """
-    Configures the metadata that robotpy-build passes to setuptools when
-    the project is installed. The keys in this section match the standard
-    arguments passed to the ``setuptools.setup`` function.
-
-    .. code-block:: toml
-
-       [tool.robotpy-build.metadata]
-       name = "my-awesome-dist"
-       description = "Cool thing"
-       license = "MIT"
-
-    robotpy-build will automatically detect/set the following keys:
-
-    * cmdclass
-    * ext_modules
-    * include_package_data - ``True``
-    * long_description - Contents of README.md/README.rst
-    * long_description_content_type - If required
-    * packages
-    * python_requires - ``>=3.6``
-    * version - via setuptools_scm
-    * zip_safe - ``False``
-
-    .. note:: This section is required
-    """
-
-    class Config:
-        # allow passing in extra keywords to setuptools
-        extra = "allow"
-
-    #: The name of the package
-    name: str
-
-    #: A single line describing the package
-    description: Optional[str] = None
-
-    #: The name of the package author
-    author: str
-
-    #: The email address of the package author
-    author_email: str
-
-    #: A URL for the package (homepage)
-    url: str
-
-    #: The license for the package
-    license: str
-
-    #: A string or list of strings specifying what other distributions need
-    #: to be installed when this one is. If the requirement is ``==THIS_VERSION``,
-    #: the requirement is set to be the same version as this package
-    install_requires: List[str]
-
-
-class SupportedPlatform(Model):
-    """
-    Supported platforms for this project. Currently this information is
-    merely advisory, and is used to generate error messages when platform
-    specific downloads fail.
-
-    .. code-block:: toml
-
-       [tool.robotpy-build]
-       base_package = "my.package"
-       supported_platforms = [
-           { os = "windows", arch = "x86-64" },
-       ]
-
-    .. seealso:: List of supported :ref:`platforms <platforms>`
-
-    """
-
-    #: Platform operating system name
-    os: Optional[str] = None
-
-    #: Platform architecture
-    arch: Optional[str] = None
-
-
-class RobotpyBuildConfig(Model):
-    """
-    Contains information for configuring the project
-
-    .. code-block:: toml
-
-       [tool.robotpy-build]
-       base_package = "my.package"
-
-    .. note:: This section is required
-    """
-
-    #: Python package to store version information and robotpy-build metadata in
-    base_package: str
-
-    #: List of headers for the scan-headers tool to ignore
-    scan_headers_ignore: List[str] = []
-
-    #: List of python packages with __init__.py to update when ``python setup.py update_init``
-    #: is called -- this is an argument to the ``robotpy-build create-imports`` command, and
-    #: may contain a space and the second argument to create-imports.
-    update_init: List[str] = []
-
-    #:
-    #: .. seealso:: :class:`.SupportedPlatform`
-    #:
-    supported_platforms: List[SupportedPlatform] = []
-
-    #
-    # These are all documented in their class, it's more confusing to document
-    # them here too.
-    #
-
-    metadata: DistutilsMetadata
-
-    wrappers: Dict[str, WrapperConfig] = {}
-
-    static_libs: Dict[str, StaticLibConfig] = {}
diff --git a/robotpy_build/config/util.py b/robotpy_build/config/util.py
deleted file mode 100644
index 5caec00c..00000000
--- a/robotpy_build/config/util.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import os
-from pydantic import BaseModel
-
-# Needed because pydantic gets in the way of generating good docs
-_generating_documentation = bool(os.environ.get("GENERATING_DOCUMENTATION"))
-if _generating_documentation:
-    BaseModel = object
-
-
-class Model(BaseModel):
-    class Config:
-        extra = "forbid"
diff --git a/robotpy_build/download.py b/robotpy_build/download.py
deleted file mode 100644
index 0dad5b1c..00000000
--- a/robotpy_build/download.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import atexit
-import contextlib
-import os
-from os.path import dirname, exists, join, normpath
-import posixpath
-import shutil
-import sys
-import urllib.request
-import tempfile
-import zipfile
-
-from .version import version
-
-
-USER_AGENT = f"robotpy-build/{version}"
-SHOW_PROGRESS = "CI" not in os.environ
-
-
-def _download(url: str, dst_fname: str):
-    """
-    Downloads a file to a specified directory
-    """
-
-    def _reporthook(count, blocksize, totalsize):
-        if SHOW_PROGRESS:
-            percent = int(count * blocksize * 100 / totalsize)
-            sys.stdout.write("\r%02d%%" % percent)
-            sys.stdout.flush()
-
-    print("Downloading", url)
-
-    request = urllib.request.Request(url, headers={"User-Agent": USER_AGENT})
-
-    with contextlib.closing(urllib.request.urlopen(request)) as fp:
-        headers = fp.info()
-
-        with open(dst_fname, "wb") as tfp:
-            # copied from urlretrieve source code, Python license
-            bs = 1024 * 8
-            size = -1
-            blocknum = 0
-            read = 0
-            if "content-length" in headers:
-                size = int(headers["Content-Length"])
-
-            while True:
-                block = fp.read(bs)
-                if not block:
-                    break
-                read += len(block)
-                tfp.write(block)
-                blocknum += 1
-                if _reporthook:
-                    _reporthook(blocknum, bs, size)
-
-    if SHOW_PROGRESS:
-        sys.stdout.write("\n")
-        sys.stdout.flush()
-
-
-def download_and_extract_zip(url, to, cache):
-    """
-    Utility method intended to be useful for downloading/extracting
-    third party source zipfiles
-
-    :param to: is either a string or a dict of {src: dst}
-    """
-
-    os.makedirs(cache, exist_ok=True)
-    zip_fname = join(cache, posixpath.basename(url))
-    if not exists(zip_fname):
-        _download(url, zip_fname)
-
-    with zipfile.ZipFile(zip_fname) as z:
-        if isinstance(to, str):
-            to = {"": to}
-
-        for src, dst in to.items():
-            if src == "":
-                z.extractall(dst)
-            else:
-                # if is directory, copy whole thing recursively
-                try:
-                    info = z.getinfo(src)
-                except KeyError as e:
-                    osrc = src
-                    src = src + "/"
-                    try:
-                        info = z.getinfo(src)
-                    except KeyError:
-                        info = None
-                    if info is None:
-                        msg = f"error extracting {osrc} from {zip_fname}"
-                        raise ValueError(msg) from e
-                if info.is_dir():
-                    ilen = len(info.filename)
-                    for minfo in z.infolist():
-                        if minfo.is_dir():
-                            continue
-                        srcname = posixpath.normpath(minfo.filename)
-                        if srcname.startswith(info.filename):
-                            dstname = join(dst, normpath(srcname[ilen:]))
-                            dstdir = dirname(dstname)
-                            if not exists(dstdir):
-                                os.makedirs(dstdir)
-                            with z.open(minfo.filename, "r") as zfp, open(
-                                dstname, "wb"
-                            ) as fp:
-                                shutil.copyfileobj(zfp, fp)
-                else:
-                    # otherwise write a single file
-                    with z.open(src, "r") as zfp, open(dst, "wb") as fp:
-                        shutil.copyfileobj(zfp, fp)
diff --git a/robotpy_build/maven.py b/robotpy_build/maven.py
deleted file mode 100644
index ae208dd8..00000000
--- a/robotpy_build/maven.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import typing
-
-from .config.pyproject_toml import Download, MavenLibDownload
-
-
-def _get_artifact_url(dlcfg: MavenLibDownload, classifier: str) -> str:
-    # TODO: support development against locally installed things?
-    repo_url = dlcfg.repo_url
-    grp = dlcfg.group_id.replace(".", "/")
-    art = dlcfg.artifact_id
-    ver = dlcfg.version
-
-    return f"{repo_url}/{grp}/{art}/{ver}/{art}-{ver}-{classifier}.zip"
-
-
-def convert_maven_to_downloads(
-    mcfg: MavenLibDownload, static: bool
-) -> typing.List[Download]:
-    """
-    Converts a MavenLibDownload object to a list of normal downloads
-    """
-
-    dl_lib = {}
-    dl_header = {}
-    dl_sources = {}
-
-    if mcfg.use_sources:
-        if static:
-            raise ValueError("Cannot specify sources in static_lib section")
-
-        # sources don't have libs, ignore them
-        dl_sources["url"] = _get_artifact_url(mcfg, mcfg.sources_classifier)
-        dl_sources["sources"] = mcfg.sources
-        dl_sources["patches"] = mcfg.patches
-    elif mcfg.sources is not None:
-        raise ValueError("sources must be None if use_sources is False!")
-    elif mcfg.patches is not None:
-        raise ValueError("patches must be None if use_sources is False!")
-    else:
-        # libs
-
-        dl_lib["libs"] = mcfg.libs
-        if mcfg.libs is mcfg.dlopenlibs is None:
-            dl_lib["libs"] = [mcfg.artifact_id]
-        dl_lib["dlopenlibs"] = mcfg.dlopenlibs
-        dl_lib["libexts"] = mcfg.libexts
-        dl_lib["linkexts"] = mcfg.linkexts
-
-        if static:
-            dl_lib["libdir"] = "{{ OS }}/{{ ARCH }}/static"
-            dl_lib["url"] = _get_artifact_url(mcfg, "{{ OS }}{{ ARCH }}static")
-        else:
-            dl_lib["libdir"] = "{{ OS }}/{{ ARCH }}/shared"
-            dl_lib["url"] = _get_artifact_url(mcfg, "{{ OS }}{{ ARCH }}")
-
-    # headers
-    dl_header["incdir"] = ""
-    dl_header["url"] = _get_artifact_url(mcfg, "headers")
-    dl_header["header_patches"] = mcfg.header_patches
-
-    # Construct downloads and return it
-    downloads = []
-    for d in (dl_lib, dl_header, dl_sources):
-        if d:
-            downloads.append(Download(**d))
-
-    return downloads
diff --git a/robotpy_build/pkgcfg.py b/robotpy_build/pkgcfg.py
deleted file mode 100644
index eb2dd547..00000000
--- a/robotpy_build/pkgcfg.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# Used by robotpybuild entry point
-
-from os.path import abspath, join, dirname
-from typing import Any, Dict, List, Optional
-
-_root = abspath(dirname(__file__))
-
-
-def get_include_dirs() -> Optional[List[str]]:
-    return [join(_root, "pybind11", "include"), join(_root, "include")]
-
-
-def get_library_dirs() -> Optional[List[str]]:
-    pass
-
-
-def get_type_casters_cfg(casters: Dict[str, Dict[str, Any]]) -> None:
-    casters.update(
-        {
-            # STL support
-            "std::vector": {"hdr": "pybind11/stl.h"},
-            "std::deque": {"hdr": "pybind11/stl.h"},
-            "std::list": {"hdr": "pybind11/stl.h"},
-            "std::array": {"hdr": "pybind11/stl.h"},
-            "std::valarray": {"hdr": "pybind11/stl.h"},
-            "std::set": {"hdr": "pybind11/stl.h"},
-            "std::map": {"hdr": "pybind11/stl.h"},
-            "std::unordered_map": {"hdr": "pybind11/stl.h"},
-            "std::optional": {"hdr": "pybind11/stl.h"},
-            "std::nullopt_t": {"hdr": "pybind11/stl.h"},
-            "std::variant": {"hdr": "pybind11/stl.h"},
-            "std::function": {"hdr": "pybind11/functional.h"},
-            "std::complex": {"hdr": "pybind11/complex.h"},
-            "std::chrono::duration": {"hdr": "pybind11/chrono.h"},
-            "std::chrono::time_point": {"hdr": "pybind11/chrono.h"},
-            # Eigen support (requires numpy)
-            "Eigen::Block": {"hdr": "pybind11/eigen.h"},
-            "Eigen::DiagonalMatrix": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixBase": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Matrix": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Matrix2d": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Matrix3d": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixXcd": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixXd": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixXdR": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixXi": {"hdr": "pybind11/eigen.h"},
-            "Eigen::MatrixXf": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Ref": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Matrix4d": {"hdr": "pybind11/eigen.h"},
-            "Eigen::RowVectorXf": {"hdr": "pybind11/eigen.h"},
-            "Eigen::SparseMatrix": {"hdr": "pybind11/eigen.h"},
-            "Eigen::SparseView": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Vector": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Vector2d": {"hdr": "pybind11/eigen.h"},
-            "Eigen::Vector3d": {"hdr": "pybind11/eigen.h"},
-            "Eigen::VectorXf": {"hdr": "pybind11/eigen.h"},
-            "Eigen::VectorXcf": {"hdr": "pybind11/eigen.h"},
-        }
-    )
-
-
-def get_type_casters(casters: Dict[str, str]) -> None:
-    t = {}
-    get_type_casters_cfg(t)
-    for k, v in t.items():
-        if "hdr" in v:
-            casters[k] = v["hdr"]
diff --git a/robotpy_build/pkgcfg_provider.py b/robotpy_build/pkgcfg_provider.py
deleted file mode 100644
index 3435a084..00000000
--- a/robotpy_build/pkgcfg_provider.py
+++ /dev/null
@@ -1,207 +0,0 @@
-import importlib.util
-from importlib.metadata import entry_points, EntryPoint
-from os.path import join, dirname
-import sys
-from typing import Dict, List, Optional, Set
-import warnings
-
-
-def _hacky_entrypoint_loader(module_name):
-    # load the root parent spec
-    pkgs = module_name.split(".")
-    spec = importlib.util.find_spec(pkgs[0])
-    assert spec is not None and spec.origin is not None
-
-    # even namespace packages are installed in the path, so just guess
-    # ... and maybe it works?
-    fname = join(dirname(spec.origin), *pkgs[1:]) + ".py"
-    spec = importlib.util.spec_from_file_location(module_name, fname)
-    module = importlib.util.module_from_spec(spec)
-    sys.modules[module_name] = module
-    spec.loader.exec_module(module)
-    return module
-
-
-class PkgCfg:
-    """
-    Contains information about an installed package that uses robotpy-build
-    """
-
-    def __init__(self, entry_point: EntryPoint):
-        try:
-            self.module = entry_point.load()
-        except Exception as e:
-            try:
-                mod = (
-                    entry_point.module
-                    if hasattr(entry_point, "module")
-                    else entry_point.value
-                )
-                self.module = _hacky_entrypoint_loader(mod)
-            except Exception:
-                raise e
-
-        self.name = entry_point.name
-
-        # could deduce this, but this is probably fine
-        self.libinit_import = getattr(self.module, "libinit_import", None)
-        self.depends = getattr(self.module, "depends", [])
-        self.pypi_package = getattr(self.module, "pypi_package", None)
-        self.package_name = getattr(self.module, "package_name", None)
-        self.static_lib = getattr(self.module, "static_lib", False)
-
-    def get_include_dirs(self) -> Optional[List[str]]:
-        """
-        Include directories provided by this module
-        """
-        fn = getattr(self.module, "get_include_dirs", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_library_dirs(self) -> Optional[List[str]]:
-        """
-        Directories where libraries reside
-        """
-        fn = getattr(self.module, "get_library_dirs", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_library_dirs_rel(self) -> Optional[List[str]]:
-        """
-        Directories where libraries reside, relative to package
-        """
-        fn = getattr(self.module, "get_library_dirs_rel", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_library_names(self) -> Optional[List[str]]:
-        """
-        Names of libraries provided (for linking)
-        """
-        fn = getattr(self.module, "get_library_names", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_extra_objects(self) -> Optional[List[str]]:
-        """
-        Names of extra objects to link in
-        """
-        fn = getattr(self.module, "get_extra_objects", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_library_full_names(self) -> Optional[List[str]]:
-        """
-        Full names of libraries provided (needed for OSX support)
-        """
-        fn = getattr(self.module, "get_library_full_names", None)
-        if fn:
-            return fn()
-        return None
-
-    def get_type_casters(self, casters: Dict[str, str]) -> None:
-        """
-        Legacy type caster information
-        """
-        t = {}
-        r = self.get_type_casters_cfg(t)
-        for k, v in t.items():
-            if "hdr" in v:
-                casters[k] = v["hdr"]
-        return r
-
-    def get_type_casters_cfg(self, casters: Dict[str, str]) -> None:
-        """
-        Type caster headers provided
-
-        key: type name
-        value: a dict with keys:
-            hdr: header file
-            darg: force default arg
-
-        """
-        fn = getattr(self.module, "get_type_casters_cfg", None)
-        if fn:
-            return fn(casters)
-        fn = getattr(self.module, "get_type_casters", None)
-        if fn:
-            t = {}
-            r = fn(t)
-            casters.update({k: {"hdr": v} for k, v in t.items()})
-            return r
-
-
-class PkgCfgProvider:
-    """
-    Retrieves information about robotpy-build packages
-
-    Warning: Not to be confused with 'pkg-config'
-    """
-
-    def __init__(self):
-        self.pkgs = {}
-
-    def detect_pkgs(self) -> None:
-        """
-        Detect and load packages under the robotpybuild entry point group.
-        Only loads packages that are dependencies.
-        """
-        deps_names = set().union(*[pkg.depends for pkg in self.pkgs.values()])
-        ep_ret = entry_points()
-
-        # Python 3.8/3.9
-        if isinstance(ep_ret, dict):
-            all_entry_points = ep_ret.get("robotpybuild", [])
-        else:
-            all_entry_points = [e for e in entry_points() if e.group == "robotpybuild"]
-
-        # Only load the dependencies of the package we're building.
-        # If we load the [package being built], then the current build will fail.
-        # If we load a package that depends on the [package being built],
-        # then the [package being built] will be loaded and the current build will fail.
-        run_loop = True
-        while run_loop:
-            run_loop = False
-            for ep in all_entry_points:
-                if ep.name in self.pkgs:  # Prevents loading the package being built
-                    continue
-                if ep.name not in deps_names and ep.name != "robotpy-build":
-                    continue
-                try:
-                    pkg = PkgCfg(ep)
-                except Exception as e:
-                    warnings.warn(f"Error loading entry point {ep.name}: {e}")
-                else:
-                    self.add_pkg(pkg)
-                    deps_names |= set(pkg.depends)
-                    run_loop = True
-
-    def add_pkg(self, pkg: PkgCfg) -> None:
-        self.pkgs[pkg.name] = pkg
-
-    def get_pkg(self, name: str) -> PkgCfg:
-        try:
-            return self.pkgs[name]
-        except KeyError:
-            raise KeyError("robotpy-build package '%s' not installed" % name)
-
-    def get_all_deps(self, name: str) -> Set[PkgCfg]:
-        deps: Set[PkgCfg] = set()
-
-        def _get(name: str):
-            pkg = self.get_pkg(name)
-            if pkg in deps:
-                return pkg
-            deps.add(pkg)
-            for dep in pkg.depends:
-                _get(dep)
-            return pkg
-
-        pkg = _get(name)
-        deps.remove(pkg)
-        return deps
diff --git a/robotpy_build/relink_libs.py b/robotpy_build/relink_libs.py
deleted file mode 100644
index ddbaa823..00000000
--- a/robotpy_build/relink_libs.py
+++ /dev/null
@@ -1,177 +0,0 @@
-"""
-    On OSX, the loader does not look at the current process to load
-    dylibs -- it insists on finding them itself, so we have to fixup
-    our binaries such that they resolve correctly.
-    
-    Two cases we need to deal with
-    - Local development/installation
-    - Building a wheel for pypi
-    
-    In development, we assume things are installed exactly where they will be
-    at runtime.
-      -> @loader_path/{relpath(final_location, dep_path)}
-    
-    For pypi wheels, we assume that installation is in site-packages, and
-    so are the libraries that this lib depends on.
-      -> @loader_path/{relpath(final_siterel, dep_siterel)}
-    
-    Notice these are the same IF you only build wheels in a virtualenv
-    that only has its dependencies installed in site-packages
-    
-
-    .. warning:: This will only work for the environment it's compiled in!
-                 This basically means don't compile wheels in your development 
-                 environment, use a clean environment instead
-
-"""
-
-from delocate.delocating import filter_system_libs
-from delocate.tools import get_install_names, set_install_name as _set_install_name
-
-from os import path
-
-from typing import Dict, List, Optional, Tuple
-
-from .pkgcfg_provider import PkgCfg, PkgCfgProvider
-
-
-def set_install_name(file: str, old_install_name: str, new_install_name: str):
-    """Change the install name for a library
-
-    :param file: path to a executable/library file
-    :param old_install_name: current path to dependency
-    :param new_install_name: new path to dependency
-    """
-
-    # This function just calls delocate's set_install_name which uses install_name_tool.
-    # This function exists in case we want to change the implementation.
-
-    _set_install_name(file, old_install_name, new_install_name)
-    print("Relink:", file, ":", old_install_name, "->", new_install_name)
-
-
-# Common data structure used here
-# - key is basename of library file
-# - Value tuple has two pieces:
-#   - 0: Where the library file really is right now
-#   - 1: Where the library file will be when installed
-LibsDict = Dict[str, Tuple[str, str]]
-
-
-def _resolve_libs(libpaths: Optional[List[str]], libname_full: str, libs: LibsDict):
-    if not libpaths:
-        return
-    for libpath in libpaths:
-        p = path.join(libpath, libname_full)
-        if path.exists(p):
-            libs[libname_full] = (p, p)
-            return
-
-
-def _resolve_libs_in_self(dep: PkgCfg, install_root: str, libs: LibsDict):
-    pkgroot = path.join(install_root, *dep.package_name.split("."))
-    full_names = dep.get_library_full_names()
-    if not full_names:
-        return
-    for libname_full in full_names:
-        for ld, ldr in zip(dep.get_library_dirs(), dep.get_library_dirs_rel()):
-            p = path.join(ld, libname_full)
-            if path.exists(p):
-                # stores where it will exist
-                libs[libname_full] = (p, path.join(pkgroot, ldr, libname_full))
-                break
-
-
-def _resolve_dependencies(
-    install_root: str, pkg: PkgCfg, pkgcfg: PkgCfgProvider, libs: LibsDict
-):
-    # first, gather all possible libraries by retrieving this package and
-    # it's dependents. We're not concerned about redirecting non-robotpy-build
-    # libraries, since we can't control where those are located
-    deps = pkgcfg.get_all_deps(pkg.name)
-
-    pypi_package = pkg.pypi_package
-
-    for dep in deps:
-        # dependencies are in their installed location
-        # .. except when they're in the same wheel
-        if pypi_package and dep.pypi_package == pypi_package:
-            _resolve_libs_in_self(dep, install_root, libs)
-        else:
-            libdirs = dep.get_library_dirs()
-            full_names = dep.get_library_full_names()
-            if full_names:
-                for libname_full in full_names:
-                    _resolve_libs(libdirs, libname_full, libs)
-
-
-def _fix_libs(to_fix: LibsDict, libs: LibsDict):
-    for current_libpath, install_libpath in to_fix.values():
-        for lib in get_install_names(current_libpath):
-            libb = path.basename(lib)
-            libdata = libs.get(libb)
-            if libdata:
-                desired_path = path.relpath(libdata[1], path.dirname(install_libpath))
-                desired_path = "@loader_path/" + desired_path
-                set_install_name(current_libpath, lib, desired_path)
-            elif filter_system_libs(lib):
-                raise ValueError(
-                    "unresolved lib %s: maybe a dependency is missing?" % lib
-                )
-
-
-def relink_libs(install_root: str, pkg: PkgCfg, pkgcfg: PkgCfgProvider):
-    """
-    Given a package, relink it's external libraries
-
-    :param install_root: Where this package will be (is) installed
-    :param pkg: Object that implements pkgcfg for this wrapper
-    :param pkgcfg: robotpy-build pkgcfg resolver
-    """
-    libs: LibsDict = {}
-    _resolve_dependencies(install_root, pkg, pkgcfg, libs)
-    to_fix: LibsDict = {}
-    _resolve_libs_in_self(pkg, install_root, to_fix)
-    libs.update(to_fix)
-    _fix_libs(to_fix, libs)
-
-
-def relink_extension(
-    install_root: str,
-    extension_path: str,
-    extension_rel: str,
-    pkg: PkgCfg,
-    pkgcfg: PkgCfgProvider,
-) -> LibsDict:
-    """
-    Given an extension, relink it
-
-    :param install_root: Where this package will be (is) installed
-    :param extension_path: full path to extension library
-    :param extension_rel: Relative path to library where it will be (is) installed
-    :param pkg: Object that implements pkgcfg for this wrapper
-    :param pkgcfg: robotpy-build pkgcfg resolver
-    """
-    libs: LibsDict = {}
-    _resolve_dependencies(install_root, pkg, pkgcfg, libs)
-    _resolve_libs_in_self(pkg, install_root, libs)
-
-    to_fix = {
-        path.basename(extension_path): (
-            extension_path,
-            path.join(install_root, extension_rel),
-        )
-    }
-    _fix_libs(to_fix, libs)
-    return libs
-
-
-def resolve_libs(
-    install_root: str,
-    pkg: PkgCfg,
-    pkgcfg: PkgCfgProvider,
-):
-    libs: LibsDict = {}
-    _resolve_dependencies(install_root, pkg, pkgcfg, libs)
-    _resolve_libs_in_self(pkg, install_root, libs)
-    return libs
diff --git a/robotpy_build/setup.py b/robotpy_build/setup.py
deleted file mode 100644
index 75912c29..00000000
--- a/robotpy_build/setup.py
+++ /dev/null
@@ -1,241 +0,0 @@
-import os
-from os.path import abspath, exists, join
-from setuptools import find_packages, setup as _setup
-from setuptools_scm import get_version
-import tomli
-
-try:
-    from wheel.bdist_wheel import bdist_wheel as _bdist_wheel
-
-    class bdist_wheel(_bdist_wheel):
-        def finalize_options(self):
-            _bdist_wheel.finalize_options(self)
-            self.root_is_pure = False
-
-except ImportError:
-    bdist_wheel = None  # type: ignore
-
-from .autowrap.writer import WrapperWriter
-
-from .command.build_py import BuildPy
-from .command.build_dl import BuildDl
-from .command.build_gen import BuildGen
-from .command.build_ext import BuildExt
-from .command.build_pyi import BuildPyi
-from .command.develop import Develop
-from .command.update_init import UpdateInit
-
-try:
-    from .command.editable_wheel import EditableWheel
-except ImportError:
-    EditableWheel = None  # type: ignore
-
-from .config.pyproject_toml import RobotpyBuildConfig
-
-from .maven import convert_maven_to_downloads
-from .overrides import apply_overrides
-from .pkgcfg_provider import PkgCfgProvider
-from .platforms import get_platform, get_platform_override_keys
-from .static_libs import StaticLib
-from .wrapper import Wrapper
-
-
-class Setup:
-    """
-    Hacky wrapper around setuptools because it's easier than copy/pasting
-    this stuff to a million setup.py files
-    """
-
-    def __init__(self):
-        self.root = abspath(os.getcwd())
-        self.wrappers = []
-        self.static_libs = []
-
-        self.platform = get_platform()
-
-        project_fname = join(self.root, "pyproject.toml")
-
-        try:
-            with open(project_fname, "rb") as fp:
-                self.pyproject = tomli.load(fp)
-        except FileNotFoundError as e:
-            raise ValueError("current directory is not a robotpy-build project") from e
-
-        self.project_dict = self.pyproject.get("tool", {}).get("robotpy-build", {})
-
-        # Overrides are applied before pydantic does processing, so that
-        # we can easily override anything without needing to make the
-        # pydantic schemas messy with needless details
-        override_keys = get_platform_override_keys(self.platform)
-        apply_overrides(self.project_dict, override_keys)
-
-        try:
-            self.project = RobotpyBuildConfig(**self.project_dict)
-        except Exception as e:
-            raise ValueError(
-                f"robotpy-build configuration in pyproject.toml is incorrect"
-            ) from e
-
-        # Remove deprecated 'generate' data and migrate
-        for wname, wrapper in self.project.wrappers.items():
-            if wrapper.generate:
-                if wrapper.autogen_headers:
-                    raise ValueError(
-                        "must not specify 'generate' and 'autogen_headers'"
-                    )
-                autogen_headers = {}
-                for l in wrapper.generate:
-                    for name, header in l.items():
-                        if name in autogen_headers:
-                            raise ValueError(
-                                f"{wname}.generate: duplicate key '{name}'"
-                            )
-                        autogen_headers[name] = header
-                wrapper.autogen_headers = autogen_headers
-                wrapper.generate = None
-
-        # Shared wrapper writer instance
-        self.wwriter = WrapperWriter()
-
-    @property
-    def base_package(self):
-        return self.project.base_package
-
-    @property
-    def base_package_path(self):
-        return join(self.root, *self.base_package.split("."))
-
-    @property
-    def git_dir(self):
-        return join(self.root, ".git")
-
-    @property
-    def pypi_package(self) -> str:
-        return self.setup_kwargs["name"]
-
-    def prepare(self):
-        self.setup_kwargs = self.project_dict.get("metadata", {})
-        self.setup_kwargs["zip_safe"] = False
-        self.setup_kwargs["include_package_data"] = True
-        self.setup_kwargs["python_requires"] = ">=3.8"
-
-        self._generate_long_description()
-
-        # get_version expects the directory to exist
-        base_package_path = self.base_package_path
-        os.makedirs(base_package_path, exist_ok=True)
-        this_version = get_version(
-            write_to=join(base_package_path, "version.py"),
-            fallback_version="master",
-            search_parent_directories=True,
-        )
-        self.setup_kwargs["version"] = this_version
-
-        # Support ==THIS_VERSION
-        install_requires = self.setup_kwargs.get("install_requires")
-        if install_requires:
-
-            def _xform(v: str):
-                if v.endswith("==THIS_VERSION"):
-                    v = f"{v[:-14]}=={this_version}"
-                return v
-
-            self.setup_kwargs["install_requires"] = list(map(_xform, install_requires))
-
-        self.pkgcfg = PkgCfgProvider()
-
-        self._collect_static_libs()
-        self._collect_wrappers()
-
-        self.pkgcfg.detect_pkgs()
-
-        self.setup_kwargs["cmdclass"] = {
-            "build_py": BuildPy,
-            "build_dl": BuildDl,
-            "build_gen": BuildGen,
-            "build_ext": BuildExt,
-            "build_pyi": BuildPyi,
-            "develop": Develop,
-            "update_init": UpdateInit,
-        }
-        if EditableWheel:
-            self.setup_kwargs["cmdclass"]["editable_wheel"] = EditableWheel
-        if bdist_wheel:
-            self.setup_kwargs["cmdclass"]["bdist_wheel"] = bdist_wheel
-        for cls in self.setup_kwargs["cmdclass"].values():
-            cls.wrappers = self.wrappers
-            cls.static_libs = self.static_libs
-            cls.rpybuild_pkgcfg = self.pkgcfg
-        BuildPyi.base_package = self.base_package
-        UpdateInit.update_list = self.project.update_init
-
-        # We already know some of our packages, so collect those in addition
-        # to using find_packages()
-        packages = {w.package_name for w in self.wrappers}
-        packages.update(find_packages())
-        self.setup_kwargs["packages"] = list(packages)
-
-    def _generate_long_description(self):
-        readme_rst = join(self.root, "README.rst")
-        readme_md = join(self.root, "README.md")
-        if exists(readme_rst):
-            self.setup_kwargs["long_description_content_type"] = "text/x-rst"
-            with open(readme_rst) as fp:
-                self.setup_kwargs["long_description"] = fp.read()
-
-        elif exists(readme_md):
-            self.setup_kwargs["long_description_content_type"] = "text/markdown"
-            with open(readme_md) as fp:
-                self.setup_kwargs["long_description"] = fp.read()
-
-    def _collect_wrappers(self):
-        ext_modules = []
-
-        for package_name, cfg in self.project.wrappers.items():
-            if cfg.ignore:
-                continue
-            self._fix_downloads(cfg, False)
-            w = Wrapper(package_name, cfg, self, self.wwriter)
-            self.wrappers.append(w)
-            self.pkgcfg.add_pkg(w)
-
-            if w.extension:
-                ext_modules.append(w.extension)
-
-        if ext_modules:
-            self.setup_kwargs["ext_modules"] = ext_modules
-
-    def _collect_static_libs(self):
-        for name, cfg in self.project.static_libs.items():
-            if cfg.ignore:
-                continue
-            self._fix_downloads(cfg, True)
-            if not cfg.download:
-                raise ValueError(f"static_lib {name} must specify downloads")
-            s = StaticLib(name, cfg, self)
-            self.static_libs.append(s)
-            self.pkgcfg.add_pkg(s)
-
-    def _fix_downloads(self, cfg, static: bool):
-        # maven is just a special case of a download
-        if cfg.maven_lib_download:
-            downloads = convert_maven_to_downloads(cfg.maven_lib_download, static)
-            cfg.maven_lib_download = None
-            if cfg.download:
-                cfg.download.append(downloads)
-            else:
-                cfg.download = downloads
-
-        if cfg.download:
-            for dl in cfg.download:
-                dl._update_with_platform(self.platform)
-
-    def run(self):
-        # assemble all the pieces and make it work
-        _setup(**self.setup_kwargs)
-
-
-def setup():
-    s = Setup()
-    s.prepare()
-    s.run()
diff --git a/robotpy_build/static_libs.py b/robotpy_build/static_libs.py
deleted file mode 100644
index 0f0680bc..00000000
--- a/robotpy_build/static_libs.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import os
-from os.path import join
-import posixpath
-import shutil
-from typing import Any, Dict, List, Optional
-
-from .download import download_and_extract_zip
-from .config.pyproject_toml import Download, StaticLibConfig
-
-
-class StaticLib:
-    # implements pkgcfg
-
-    def __init__(self, name: str, cfg: StaticLibConfig, setup):
-        self.package_name = name
-        self.name = name
-        self.cfg = cfg
-        self.static_lib = True
-        self.libinit_import = None
-        self.pypi_package = None
-        # TODO
-        self.depends = []
-
-        self.platform = setup.platform
-
-        self.root: Optional[os.PathLike] = None
-        self.incdir: Optional[str] = None
-        self.libdir: Optional[str] = None
-
-    def set_root(self, root: os.PathLike) -> None:
-        self.root = root
-        self.libdir = join(self.root, self.name, "lib")
-        self.incdir = join(self.root, self.name, "include")
-
-    def get_include_dirs(self) -> Optional[List[str]]:
-        if self.incdir is None:
-            return
-
-        includes = [self.incdir]
-        if self.cfg.download:
-            for dl in self.cfg.download:
-                if dl.extra_includes:
-                    includes += [join(self.incdir, inc) for inc in dl.extra_includes]
-        return includes
-
-    def get_library_dirs(self) -> Optional[List[str]]:
-        if self.libdir:
-            return [self.libdir]
-
-    def get_library_dirs_rel(self) -> Optional[List[str]]:
-        pass
-
-    def get_library_names(self) -> Optional[List[str]]:
-        # don't do this except on Windows
-        if self.platform.os != "windows":
-            return
-
-        return self._get_libnames(useext=False)
-
-    def get_library_full_names(self) -> Optional[List[str]]:
-        pass
-
-    def get_extra_objects(self) -> Optional[List[str]]:
-        if self.platform.os == "windows":
-            return
-
-        if self.libdir:
-            return [join(self.libdir, lib) for lib in self._get_libnames()]
-
-    def get_type_casters_cfg(self, casters: Dict[str, Dict[str, Any]]) -> None:
-        pass
-
-    def _get_dl_libnames(self, dl: Download, useext=True):
-        ext = ""
-        if useext:
-            ext = self.platform.staticext
-        return [f"{self.platform.libprefix}{lib}{ext}" for lib in dl.libs]
-
-    def _get_libnames(self, useext=True):
-        libs = []
-        for dl in self.cfg.download:
-            if dl.libs:
-                libs += self._get_dl_libnames(dl, useext)
-        return libs
-
-    def on_build_dl(self, cache: str, libdir: str):
-        self.set_root(libdir)
-
-        shutil.rmtree(self.libdir, ignore_errors=True)
-        shutil.rmtree(self.incdir, ignore_errors=True)
-
-        os.makedirs(self.libdir)
-
-        for dl in self.cfg.download:
-            if dl.sources is not None:
-                raise ValueError(f"{dl.url}: cannot specify sources in static lib")
-
-            if dl.libs is None:
-                if dl.incdir is None:
-                    raise ValueError(f"{dl.url}: must specify libs in static lib")
-                to = {}
-            else:
-                to = {
-                    posixpath.join(dl.libdir, libname): join(self.libdir, libname)
-                    for libname in self._get_dl_libnames(dl)
-                }
-
-            if dl.incdir is not None:
-                to[dl.incdir] = self.incdir
-
-            if dl.dlopenlibs is not None:
-                raise ValueError(f"{dl.url}: cannot specify dlopenlibs in static lib")
-
-            download_and_extract_zip(dl.url, to, cache)
diff --git a/robotpy_build/wrapper.py b/robotpy_build/wrapper.py
deleted file mode 100644
index 33e72552..00000000
--- a/robotpy_build/wrapper.py
+++ /dev/null
@@ -1,829 +0,0 @@
-import glob
-import json
-import inspect
-import os
-from os.path import (
-    abspath,
-    basename,
-    dirname,
-    exists,
-    isdir,
-    join,
-    normpath,
-    relpath,
-    sep,
-    splitext,
-)
-import pathlib
-import posixpath
-import shutil
-import sysconfig
-import toposort
-from typing import Any, Dict, List, Optional, Set, Tuple
-
-from urllib.error import HTTPError
-import dataclasses
-
-from setuptools import Extension
-
-from cxxheaderparser.options import ParserOptions
-from cxxheaderparser import preprocessor
-
-
-from .download import download_and_extract_zip
-from .config.pyproject_toml import PatchInfo, WrapperConfig, Download
-
-from .autowrap.cxxparser import parse_header
-from .autowrap.generator_data import GeneratorData, MissingReporter
-from .autowrap.writer import WrapperWriter
-
-from .config.autowrap_yml import AutowrapConfigYaml
-from .config.dev_yml import get_dev_config
-from .config.pyproject_toml import WrapperConfig, Download
-
-# TODO: eventually provide native preprocessor by default and allow it
-#       to be enabled/disabled per-file just in case
-if os.getenv("RPYBUILD_PP_GCC") == "1":
-    # GCC preprocessor can be 10x faster than pcpp for very complex files
-    def make_preprocessor(*args, **kwargs):
-        return preprocessor.make_gcc_preprocessor(print_cmd=False, *args, **kwargs)
-
-else:
-    make_preprocessor = preprocessor.make_pcpp_preprocessor
-
-
-class Wrapper:
-    """
-    Wraps downloading bindings and generating them
-    """
-
-    # Used during preprocessing
-    # -> should we change this based on what flags the compiler supports?
-    _cpp_version = "__cplusplus 201703L"
-
-    def __init__(self, package_name, cfg: WrapperConfig, setup, wwriter: WrapperWriter):
-        self.package_name = package_name
-        self.cfg = cfg
-        self.wwriter = wwriter
-
-        self.setup_root = setup.root
-        self.pypi_package = setup.pypi_package
-        self.root = join(setup.root, *package_name.split("."))
-
-        # must match PkgCfg.name
-        self.name = cfg.name
-        self.static_lib = False
-
-        # Compute the extension name, even if we don't create one
-        extname = cfg.extension
-        if not extname:
-            extname = f"_{cfg.name}"
-
-        # must match PkgCfg.libinit_import
-        if cfg.libinit:
-            libinit_py = cfg.libinit
-            if libinit_py == "__init__.py":
-                self.libinit_import = package_name
-            else:
-                pkg = splitext(libinit_py)[0]
-                self.libinit_import = f"{package_name}.{pkg}"
-        else:
-            libinit_py = f"_init{extname}.py"
-            self.libinit_import = f"{package_name}._init{extname}"
-
-        self.libinit_import_py = join(self.root, libinit_py)
-
-        self.platform = setup.platform
-        self.pkgcfg = setup.pkgcfg
-
-        # Used by pkgcfg
-        self.depends = self.cfg.depends
-
-        # Files that are generated AND need to be in the final wheel. Used by build_py
-        self.additional_data_files: List[str] = []
-
-        self._all_deps = None
-
-        self._gen_includes = []
-
-        self.extension = None
-        if self.cfg.sources or self.cfg.autogen_headers:
-            define_macros = [("RPYBUILD_MODULE_NAME", extname)] + [
-                tuple(d.split(" ")) for d in self.platform.defines
-            ]
-            define_macros += [tuple(m.split(" ", 1)) for m in self.cfg.pp_defines]
-
-            # extensions just hold data about what to actually build, we can
-            # actually modify extensions all the way up until the build
-            # really happens
-            extname_full = f"{self.package_name}.{extname}"
-            self.extension = Extension(
-                extname_full,
-                self.cfg.sources,
-                define_macros=define_macros,
-                language="c++",
-            )
-
-            # Add self to extension so that build_ext can query it on OSX
-            self.extension.rpybuild_wrapper = self
-
-            # Used if the maven download fails
-            self.supported_platforms = setup.project.supported_platforms
-
-        if self.cfg.autogen_headers and not self.cfg.generation_data:
-            raise ValueError(
-                "generation_data must be specified when autogen_headers/generate is specified"
-            )
-
-        # Setup an entry point (written during build_clib)
-        entry_point = f"{self.cfg.name} = {self.package_name}.pkgcfg"
-
-        setup_kwargs = setup.setup_kwargs
-        ep = setup_kwargs.setdefault("entry_points", {})
-        ep.setdefault("robotpybuild", []).append(entry_point)
-
-        self.incdir = join(self.root, "include")
-        self.rpy_incdir = join(self.root, "rpy-include")
-
-        self.dev_config = get_dev_config(self.name)
-
-        self._update_addl_data_files()
-
-    def _extract_zip_to(self, dl: Download, dst, cache):
-        try:
-            download_and_extract_zip(dl.url, dst, cache)
-        except HTTPError as e:
-            # Check for a 404 error and raise an error if the platform isn't supported.
-            if e.code != 404:
-                raise e
-            else:
-                platform_dict = dataclasses.asdict(self.platform)
-
-                os = platform_dict["os"]
-                arch = platform_dict["arch"]
-
-                is_os_supported = False
-                is_arch_supported = False
-
-                for supp_plat in self.supported_platforms:
-                    if supp_plat.os is None or supp_plat.os == os:
-                        is_os_supported = True
-                        if supp_plat.arch is None or supp_plat.arch == arch:
-                            is_arch_supported = True
-
-                if not (is_os_supported and is_arch_supported):
-                    if arch == "x86":
-                        arch = "32-bit"
-                    elif arch == "x86-64":
-                        arch = "64-bit"
-
-                    if os == "osx":
-                        os = "macOS"
-
-                    if not is_os_supported:
-                        arch = ""
-
-                    msg_plat = "{}{}{}".format(arch, " " if arch != "" else "", os)
-
-                    err_msg = "{} is not supported on {}!".format(
-                        self.pypi_package, msg_plat
-                    )
-
-                    raise OSError(err_msg)
-                raise e
-
-    def _add_addl_data_file(self, fullpath):
-        if not isdir(fullpath):
-            self.additional_data_files.append(relpath(fullpath, self.root))
-
-    # pkgcfg interface
-    def get_include_dirs(self) -> List[str]:
-        includes = [self.incdir, self.rpy_incdir]
-        if self.cfg.download:
-            for dl in self.cfg.download:
-                if dl.extra_includes:
-                    includes += [join(self.incdir, inc) for inc in dl.extra_includes]
-        for h in self.cfg.extra_includes:
-            includes.append(join(self.setup_root, normpath(h)))
-        return includes
-
-    def get_library_dirs(self) -> Optional[List[str]]:
-        if self.get_library_full_names():
-            return [join(self.root, "lib")]
-        return []
-
-    def get_library_dirs_rel(self) -> Optional[List[str]]:
-        if self.get_library_full_names():
-            return ["lib"]
-        return []
-
-    def get_library_names(self) -> Optional[List[str]]:
-        libs = []
-        if self.cfg.download:
-            for dl in self.cfg.download:
-                if dl.libs:
-                    libs += dl.libs
-        return libs
-
-    def get_library_full_names(self) -> Optional[List[str]]:
-        if not self.cfg.download:
-            return []
-
-        dlopen_libnames = self.get_dlopen_library_names()
-
-        libnames_full = []
-
-        for dl in self.cfg.download:
-            libext = dl.libexts.get(self.platform.libext, self.platform.libext)
-            if dl.libs:
-                for lib in dl.libs:
-                    if lib not in dlopen_libnames:
-                        libnames_full.append(f"{self.platform.libprefix}{lib}{libext}")
-            if dl.dlopenlibs:
-                libnames_full += [
-                    f"{self.platform.libprefix}{lib}{libext}" for lib in dl.dlopenlibs
-                ]
-
-        return libnames_full
-
-    def get_dlopen_library_names(self) -> Optional[List[str]]:
-        libs = []
-        if self.cfg.download:
-            for dl in self.cfg.download:
-                if dl.dlopenlibs:
-                    libs += dl.dlopenlibs
-        return libs
-
-    def get_extra_objects(self) -> Optional[List[str]]:
-        pass
-
-    def get_type_casters_cfg(self, casters: Dict[str, Dict[str, Any]]) -> None:
-        for ccfg in self.cfg.type_casters:
-            cfg = {"hdr": ccfg.header}
-            if ccfg.default_arg_cast:
-                cfg["darg"] = True
-
-            for typ in ccfg.types:
-                casters[typ] = cfg
-
-    def _update_addl_data_files(self) -> List[str]:
-        headers = set()
-        for ccfg in self.cfg.type_casters:
-            headers.add(ccfg.header)
-
-        if headers:
-            includes = self.get_include_dirs()
-            if includes:
-                for hdr in headers:
-                    for p in includes:
-                        fpath = join(p, hdr)
-                        if exists(fpath):
-                            self._add_addl_data_file(fpath)
-
-    def all_deps(self):
-        if self._all_deps is None:
-            self._all_deps = self.pkgcfg.get_all_deps(self.name)
-        return self._all_deps
-
-    def _all_includes(self, include_rpyb):
-        includes = self.get_include_dirs()
-        for dep in self.all_deps():
-            dep_inc = dep.get_include_dirs()
-            if dep_inc:
-                includes.extend(dep_inc)
-        if include_rpyb:
-            includes.extend(self.pkgcfg.get_pkg("robotpy-build").get_include_dirs())
-        return includes
-
-    def _generation_search_path(self):
-        return [self.root] + self._all_includes(False)
-
-    def _all_library_dirs(self):
-        libs = self.get_library_dirs()
-        for dep in self.cfg.depends:
-            libdirs = self.pkgcfg.get_pkg(dep).get_library_dirs()
-            if libdirs:
-                libs.extend(libdirs)
-        return libs
-
-    def _all_library_names(self):
-        libs = list(
-            set(self.get_library_names()) | set(self.get_dlopen_library_names())
-        )
-        for dep in self.cfg.depends:
-            pkg = self.pkgcfg.get_pkg(dep)
-            libnames = pkg.get_library_names()
-            if libnames:
-                libs.extend(libnames)
-        return list(reversed(libs))
-
-    def _all_extra_objects(self):
-        libs = []
-        for dep in self.cfg.depends:
-            pkg = self.pkgcfg.get_pkg(dep)
-            libnames = pkg.get_extra_objects()
-            if libnames:
-                libs.extend(libnames)
-        return list(reversed(libs))
-
-    def _all_casters(self):
-        casters = {}
-        for dep in self.all_deps():
-            dep.get_type_casters_cfg(casters)
-        self.pkgcfg.get_pkg("robotpy-build").get_type_casters_cfg(casters)
-        self.get_type_casters_cfg(casters)
-
-        # make each configuration unique
-        for k, v in list(casters.items()):
-            v = v.copy()
-            v["typename"] = k
-            casters[k] = v
-
-        # add non-namespaced versions of all casters
-        # -> in theory this could lead to a conflict, but
-        #    let's see how it works in practice?
-        for k, v in list(casters.items()):
-            k = k.split("::")[-1]
-            casters[k] = v
-        return casters
-
-    def on_build_dl(self, cache: str, srcdir: str):
-        pkgcfgpy = join(self.root, "pkgcfg.py")
-        srcdir = join(srcdir, self.name)
-
-        try:
-            os.unlink(self.libinit_import_py)
-        except OSError:
-            pass
-
-        try:
-            os.unlink(pkgcfgpy)
-        except OSError:
-            pass
-
-        libnames_full = []
-        all_libs = []
-        downloads = self.cfg.download
-        if downloads:
-            libnames_full, all_libs = self._clean_and_download(downloads, cache, srcdir)
-
-        self._write_libinit_py(libnames_full)
-        self._write_pkgcfg_py(pkgcfgpy, libnames_full)
-
-        return all_libs
-
-    def _apply_patches(self, patches: List[PatchInfo], root: str):
-        import patch
-
-        for p in patches:
-            patch_path = join(self.setup_root, normpath(p.patch))
-            ps = patch.PatchSet()
-            with open(patch_path, "rb") as fp:
-                if not ps.parse(fp):
-                    raise ValueError(f"Error parsing patch '{patch_path}'")
-
-            if not ps.apply(strip=p.strip, root=root):
-                raise ValueError(f"Error applying patch '{patch_path}' to '{root}'")
-
-    def _clean_and_download(
-        self, downloads: List[Download], cache: str, srcdir: str
-    ) -> Tuple[List[str], List[str]]:
-        libdir = join(self.root, "lib")
-        incdir = join(self.root, "include")
-
-        add_libdir = False
-        add_incdir = False
-
-        # Remove downloaded/generated artifacts first
-        shutil.rmtree(libdir, ignore_errors=True)
-        shutil.rmtree(incdir, ignore_errors=True)
-        shutil.rmtree(srcdir, ignore_errors=True)
-
-        dlopen_libnames = self.get_dlopen_library_names()
-        libnames_full = []
-        all_libs = []
-
-        for dl in downloads:
-            # extract the whole thing into a directory when using for sources
-            if dl.sources is not None:
-                download_and_extract_zip(dl.url, srcdir, cache)
-                sources = [join(srcdir, normpath(s)) for s in dl.sources]
-                self.extension.sources.extend(sources)
-                if dl.patches:
-                    self._apply_patches(dl.patches, srcdir)
-            elif dl.sources is not None:
-                raise ValueError("sources must be None if use_sources is False!")
-            elif dl.patches is not None:
-                raise ValueError("patches must be None if use_sources is False!")
-
-            if dl.libs or dl.dlopenlibs:
-                add_libdir = True
-                extract_names = []
-                os.makedirs(libdir)
-
-                libext = dl.libexts.get(self.platform.libext, self.platform.libext)
-                linkext = dl.linkexts.get(self.platform.linkext, self.platform.linkext)
-                if dl.libs:
-                    for lib in dl.libs:
-                        if lib not in dlopen_libnames:
-                            name = f"{self.platform.libprefix}{lib}{libext}"
-                            libnames_full.append(name)
-                            extract_names.append(name)
-                            if libext != linkext:
-                                extract_names.append(
-                                    f"{self.platform.libprefix}{lib}{linkext}"
-                                )
-
-                if dl.dlopenlibs:
-                    dlopen_libnames_full = [
-                        f"{self.platform.libprefix}{lib}{libext}"
-                        for lib in dl.dlopenlibs
-                    ]
-                    libnames_full += dlopen_libnames_full
-                    extract_names += dlopen_libnames_full
-
-                to = {
-                    posixpath.join(dl.libdir, libname): join(libdir, libname)
-                    for libname in extract_names
-                }
-                all_libs.extend(to.values())
-            else:
-                to = {}
-
-            if dl.incdir is not None:
-                to[dl.incdir] = self.incdir
-                add_incdir = True
-
-            download_and_extract_zip(dl.url, to, cache)
-
-            if dl.header_patches:
-                self._apply_patches(dl.header_patches, incdir)
-
-        if add_incdir:
-            for f in glob.glob(join(glob.escape(incdir), "**"), recursive=True):
-                self._add_addl_data_file(f)
-
-        if add_libdir:
-            for f in glob.glob(join(glob.escape(libdir), "**"), recursive=True):
-                self._add_addl_data_file(f)
-
-        return libnames_full, all_libs
-
-    def _write_libinit_py(self, libnames):
-        # This file exists to ensure that any shared library dependencies
-        # are loaded for the compiled extension
-
-        init = inspect.cleandoc(
-            """
-        
-        # This file is automatically generated, DO NOT EDIT
-        # fmt: off
-
-        from os.path import abspath, join, dirname, exists
-        _root = abspath(dirname(__file__))
-
-        ##IMPORTS##
-
-        """
-        )
-
-        init += "\n"
-
-        if libnames:
-            if self.platform.os == "osx":
-                init += "from ctypes import CDLL, RTLD_GLOBAL\n\n"
-            else:
-                init += "from ctypes import cdll\n\n"
-
-            for libname in libnames:
-                init += "try:\n"
-                if self.platform.os == "osx":
-                    init += f'    _lib = CDLL(join(_root, "lib", "{libname}"), mode=RTLD_GLOBAL)\n'
-                else:
-                    init += f'    _lib = cdll.LoadLibrary(join(_root, "lib", "{libname}"))\n'
-
-                init += "except FileNotFoundError:\n"
-                init += f'    if not exists(join(_root, "lib", "{libname}")):\n'
-                init += f'        raise FileNotFoundError("{libname} was not found on your system. Is this package correctly installed?")\n'
-                if self.platform.os == "windows":
-                    init += f'    raise Exception("{libname} could not be loaded. Do you have Visual Studio C++ Redistributible 2019 installed?")\n\n'
-                else:
-                    init += f'    raise FileNotFoundError("{libname} could not be loaded. There is a missing dependency.")\n\n'
-        imports = []
-        for dep in self.cfg.depends:
-            pkg = self.pkgcfg.get_pkg(dep)
-            if pkg.libinit_import:
-                imports.append(pkg.libinit_import)
-
-        if imports:
-            imports = "# runtime dependencies\nimport " + "\nimport ".join(imports)
-        else:
-            imports = ""
-
-        init = init.replace("##IMPORTS##", imports)
-
-        with open(self.libinit_import_py, "w") as fp:
-            fp.write(init)
-
-        self._add_addl_data_file(self.libinit_import_py)
-
-    def _write_pkgcfg_py(self, fname, libnames_full):
-        library_dirs = "[]"
-        library_dirs_rel = []
-        library_names = self.get_library_names()
-        if library_names:
-            library_dirs = '[join(_root, "lib")]'
-            library_dirs_rel = ["lib"]
-
-        deps = []
-        for dep in self.cfg.depends:
-            pkg = self.pkgcfg.get_pkg(dep)
-            if not pkg.static_lib:
-                deps.append(dep)
-
-        # write pkgcfg.py
-        pkgcfg = inspect.cleandoc(
-            f"""
-        # fmt: off
-        # This file is automatically generated, DO NOT EDIT
-
-        from os.path import abspath, join, dirname
-        _root = abspath(dirname(__file__))
-
-        libinit_import = "{self.libinit_import}"
-        depends = {repr(deps)}
-        pypi_package = {repr(self.pypi_package)}
-
-        def get_include_dirs():
-            return [join(_root, "include"), join(_root, "rpy-include")##EXTRAINCLUDES##]
-
-        def get_library_dirs():
-            return {library_dirs}
-
-        def get_library_dirs_rel():
-            return {repr(library_dirs_rel)}
-        
-        def get_library_names():
-            return {repr(library_names)}
-
-        def get_library_full_names():
-            return {repr(libnames_full)}
-        """
-        )
-
-        extraincludes = ""
-        if self.cfg.extra_includes:
-            # these are relative to the root of the project, need
-            # to resolve the path relative to the package
-            pth = join(*self.package_name.split("."))
-
-            for h in self.cfg.extra_includes:
-                h = '", "'.join(relpath(normpath(h), pth).split(sep))
-                extraincludes += f', join(_root, "{h}")'
-
-        pkgcfg = pkgcfg.replace("##EXTRAINCLUDES##", extraincludes)
-
-        type_casters = {}
-        self.get_type_casters_cfg(type_casters)
-        if type_casters:
-            pkgcfg += "\n\n"
-            pkgcfg += inspect.cleandoc(
-                f"""
-
-            def get_type_casters_cfg(casters):
-                casters.update({repr(type_casters)})
-
-            def get_type_casters(casters):
-                t = {{}}
-                get_type_casters_cfg(t)
-                for k, v in t.items():
-                    if "hdr" in v:
-                        casters[k] = v["hdr"]
-            """
-            )
-
-        with open(fname, "w") as fp:
-            fp.write(pkgcfg)
-
-        self._add_addl_data_file(fname)
-
-    def on_build_gen(
-        self, cxx_gen_dir, missing_reporter: Optional[MissingReporter] = None
-    ):
-        if not self.cfg.autogen_headers:
-            return
-
-        cxx_gen_dir = join(cxx_gen_dir, self.name)
-
-        if missing_reporter:
-            report_only = True
-        else:
-            report_only = False
-            missing_reporter = MissingReporter()
-
-        hppoutdir = join(self.rpy_incdir, "rpygen")
-
-        pp_includes = self._all_includes(True) + [sysconfig.get_path("include")]
-
-        # TODO: only regenerate files if the generated files
-        #       have changed
-        if not report_only:
-            if self.dev_config.only_generate is None:
-                shutil.rmtree(cxx_gen_dir, ignore_errors=True)
-                shutil.rmtree(hppoutdir, ignore_errors=True)
-
-            os.makedirs(cxx_gen_dir, exist_ok=True)
-            os.makedirs(hppoutdir, exist_ok=True)
-
-        per_header = False
-        data_fname = self.cfg.generation_data
-        if self.cfg.generation_data:
-            datapath = join(self.setup_root, normpath(self.cfg.generation_data))
-            per_header = isdir(datapath)
-            if not per_header:
-                data = AutowrapConfigYaml.from_file(datapath)
-        else:
-            data = AutowrapConfigYaml()
-
-        pp_defines = [self._cpp_version] + self.platform.defines + self.cfg.pp_defines
-        casters = self._all_casters()
-
-        # These are written to file to make it easier for dev mode to work
-        classdeps = {}
-
-        if self.dev_config.only_generate is not None:
-            only_generate = {n: True for n in self.dev_config.only_generate}
-        else:
-            only_generate = None
-
-        generation_search_path = self._generation_search_path()
-
-        for name, header in self.cfg.autogen_headers.items():
-            header = normpath(header)
-            for path in generation_search_path:
-                header_path = join(path, header)
-                if exists(header_path):
-                    header_root = pathlib.Path(path)
-                    break
-            else:
-                import pprint
-
-                pprint.pprint(generation_search_path)
-                raise ValueError("could not find " + header)
-
-            if not report_only:
-                classdeps_dst = join(cxx_gen_dir, f"{name}.json")
-                classdeps[name] = classdeps_dst
-
-            if per_header:
-                data_fname = join(datapath, name + ".yml")
-                if not exists(data_fname):
-                    print("WARNING: could not find", data_fname)
-                    data = AutowrapConfigYaml()
-                else:
-                    data = AutowrapConfigYaml.from_file(data_fname)
-
-            if only_generate is not None and not only_generate.pop(name, False):
-                continue
-
-            popts = ParserOptions(
-                preprocessor=make_preprocessor(
-                    defines=pp_defines,
-                    include_paths=pp_includes,
-                    encoding=data.encoding,
-                )
-            )
-
-            gendata = GeneratorData(data)
-
-            try:
-                hctx = parse_header(
-                    name,
-                    pathlib.Path(header_path),
-                    header_root,
-                    gendata,
-                    popts,
-                    casters,
-                    report_only,
-                )
-
-                if not report_only:
-                    generated_sources = self.wwriter.write_files(
-                        hctx, name, cxx_gen_dir, hppoutdir, classdeps_dst
-                    )
-                    self.extension.sources.extend(
-                        [relpath(src, self.setup_root) for src in generated_sources]
-                    )
-            except Exception as e:
-                raise ValueError(f"processing {header}") from e
-
-            gendata.report_missing(data_fname, missing_reporter)
-
-        if only_generate:
-            unused = ", ".join(sorted(only_generate))
-            # raise ValueError(f"only_generate specified unused headers! {unused}")
-            # TODO: make this a warning
-
-        if not report_only:
-            for name, contents in missing_reporter.as_yaml():
-                print("WARNING: some items not in generation yaml for", basename(name))
-                print(contents)
-
-        # generate an inline file that can be included + called
-        if not report_only:
-            self._write_wrapper_hpp(cxx_gen_dir, classdeps)
-            gen_includes = [cxx_gen_dir]
-        else:
-            gen_includes = []
-
-        self._gen_includes = gen_includes
-
-        for f in glob.glob(join(glob.escape(hppoutdir), "*.hpp")):
-            self._add_addl_data_file(f)
-
-    def finalize_extension(self):
-        if self.extension is None:
-            return
-
-        # Add the root to the includes (but only privately)
-        root_includes = [self.root]
-
-        # update the build extension so that build_ext works
-        # use normpath to get rid of .. otherwise gcc is weird
-        self.extension.include_dirs = [
-            normpath(p)
-            for p in (self._all_includes(True) + self._gen_includes + root_includes)
-        ]
-        self.extension.library_dirs = self._all_library_dirs()
-        self.extension.libraries = self._all_library_names()
-        self.extension.extra_objects = self._all_extra_objects()
-
-    def _write_wrapper_hpp(self, outdir, classdeps):
-        decls = []
-        begin_calls = []
-        finish_calls = []
-
-        # Need to ensure that wrapper initialization is called in base order
-        # so we have to toposort it here. The data is written at gen time
-        # to JSON files
-        types2name = {}
-        types2deps = {}
-        ordering = []
-
-        for name, jsonfile in classdeps.items():
-            with open(jsonfile) as fp:
-                dep = json.load(fp)
-
-            # make sure objects without classes are also included!
-            if not dep:
-                ordering.append(name)
-
-            for clsname, bases in dep.items():
-                if clsname in types2name:
-                    raise ValueError(f"{name} ({jsonfile}): duplicate class {clsname}")
-                types2name[clsname] = name
-                types2deps[clsname] = bases[:]
-
-        to_sort: Dict[str, Set[str]] = {}
-        for clsname, bases in types2deps.items():
-            clsname = types2name[clsname]
-            deps = to_sort.setdefault(clsname, set())
-            for base in bases:
-                base = types2name.get(base)
-                if base and base != clsname:
-                    deps.add(base)
-
-        ordering.extend(toposort.toposort_flatten(to_sort, sort=True))
-
-        for name in ordering:
-            decls.append(f"void begin_init_{name}(py::module &m);")
-            decls.append(f"void finish_init_{name}();")
-            begin_calls.append(f"    begin_init_{name}(m);")
-            finish_calls.append(f"    finish_init_{name}();")
-
-        content = (
-            inspect.cleandoc(
-                """
-
-        // This file is autogenerated, DO NOT EDIT
-        #pragma once
-        #include <robotpy_build.h>
-
-        // forward declarations
-        ##DECLS##
-
-        static void initWrapper(py::module &m) {
-        ##BEGIN_CALLS##
-
-        ##FINISH_CALLS##
-        }
-        
-        """
-            )
-            .replace("##DECLS##", "\n".join(decls))
-            .replace("##BEGIN_CALLS##", "\n".join(begin_calls))
-            .replace("##FINISH_CALLS##", "\n".join(finish_calls))
-        )
-
-        with open(join(outdir, "rpygen_wrapper.hpp"), "w") as fp:
-            fp.write(content)
diff --git a/robotpy_build/__init__.py b/src/semiwrap/__init__.py
similarity index 100%
rename from robotpy_build/__init__.py
rename to src/semiwrap/__init__.py
diff --git a/robotpy_build/__main__.py b/src/semiwrap/__main__.py
similarity index 100%
rename from robotpy_build/__main__.py
rename to src/semiwrap/__main__.py
diff --git a/robotpy_build/autowrap/__init__.py b/src/semiwrap/autowrap/__init__.py
similarity index 100%
rename from robotpy_build/autowrap/__init__.py
rename to src/semiwrap/autowrap/__init__.py
diff --git a/robotpy_build/autowrap/buffer.py b/src/semiwrap/autowrap/buffer.py
similarity index 100%
rename from robotpy_build/autowrap/buffer.py
rename to src/semiwrap/autowrap/buffer.py
diff --git a/robotpy_build/autowrap/context.py b/src/semiwrap/autowrap/context.py
similarity index 98%
rename from robotpy_build/autowrap/context.py
rename to src/semiwrap/autowrap/context.py
index c42079fc..c209b39d 100644
--- a/robotpy_build/autowrap/context.py
+++ b/src/semiwrap/autowrap/context.py
@@ -12,6 +12,7 @@
 
 from dataclasses import dataclass, field
 import enum
+import pathlib
 import typing
 
 from cxxheaderparser.types import Function, PQName
@@ -372,6 +373,9 @@ class ClassContext:
 
     parent: typing.Optional["ClassContext"]
 
+    #: lookup key for dat2trampoline
+    yml_id: str
+
     #: Namespace that this class lives in
     namespace: str
 
@@ -494,7 +498,7 @@ class TemplateInstanceContext:
     full_cpp_name_identifier: str
     binder_typename: str
 
-    params: typing.List[str]
+    params: typing.List[typing.Union[int, str]]
 
     header_name: str
 
@@ -514,6 +518,9 @@ class HeaderContext:
     # Name in toml
     hname: str
 
+    # config file for this
+    orig_yaml: pathlib.Path
+
     extra_includes_first: typing.List[str]
     extra_includes: typing.List[str]
     inline_code: typing.Optional[str]
@@ -535,6 +542,9 @@ class HeaderContext:
     # same as classes, but only those that have trampolines
     classes_with_trampolines: typing.List[ClassContext] = field(default_factory=list)
 
+    # for diagnostic purposes
+    ignored_classes: typing.List[str] = field(default_factory=list)
+
     functions: typing.List[FunctionContext] = field(default_factory=list)
 
     # trampolines
diff --git a/robotpy_build/autowrap/cxxparser.py b/src/semiwrap/autowrap/cxxparser.py
similarity index 97%
rename from robotpy_build/autowrap/cxxparser.py
rename to src/semiwrap/autowrap/cxxparser.py
index d2d77c82..29d36dcb 100644
--- a/robotpy_build/autowrap/cxxparser.py
+++ b/src/semiwrap/autowrap/cxxparser.py
@@ -58,6 +58,7 @@
     Variable,
 )
 
+from ..casters import CastersData
 from ..config.autowrap_yml import (
     AutowrapConfigYaml,
     BufferData,
@@ -87,18 +88,24 @@
     TrampolineData,
 )
 
+from ..util import relpath_walk_up
+
 
 class HasSubpackage(Protocol):
-    subpackage: typing.Optional[str]
+    @property
+    def subpackage(self) -> typing.Optional[str]: ...
 
 
 class HasDoc(Protocol):
-    doc: str
-    doc_append: str
+    @property
+    def doc(self) -> typing.Optional[str]: ...
+    @property
+    def doc_append(self) -> typing.Optional[str]: ...
 
 
 class HasNameData(Protocol):
-    rename: str
+    @property
+    def rename(self) -> typing.Optional[str]: ...
 
 
 # TODO: this isn't the best solution
@@ -115,13 +122,13 @@ def _gen_int_types():
 
 
 _rvp_map = {
-    ReturnValuePolicy.TAKE_OWNERSHIP: "py::return_value_policy::take_ownership",
-    ReturnValuePolicy.COPY: "py::return_value_policy::copy",
-    ReturnValuePolicy.MOVE: "py::return_value_policy::move",
-    ReturnValuePolicy.REFERENCE: "py::return_value_policy::reference",
-    ReturnValuePolicy.REFERENCE_INTERNAL: "py::return_value_policy::reference_internal",
-    ReturnValuePolicy.AUTOMATIC: "",
-    ReturnValuePolicy.AUTOMATIC_REFERENCE: "py::return_value_policy::automatic_reference",
+    ReturnValuePolicy.take_ownership: "py::return_value_policy::take_ownership",
+    ReturnValuePolicy.copy: "py::return_value_policy::copy",
+    ReturnValuePolicy.move: "py::return_value_policy::move",
+    ReturnValuePolicy.reference: "py::return_value_policy::reference",
+    ReturnValuePolicy.reference_internal: "py::return_value_policy::reference_internal",
+    ReturnValuePolicy.automatic: "",
+    ReturnValuePolicy.automatic_reference: "py::return_value_policy::automatic_reference",
 }
 
 # fmt: off
@@ -329,7 +336,7 @@ def __init__(
         self,
         hctx: HeaderContext,
         gendata: GeneratorData,
-        casters: typing.Dict[str, typing.Dict[str, typing.Any]],
+        casters: CastersData,
         report_only: bool,
     ) -> None:
         self.hctx = hctx
@@ -395,7 +402,7 @@ def on_template_inst(self, state: AWState, inst: TemplateInst) -> None:
         pass
 
     def on_variable(self, state: AWState, v: Variable) -> None:
-        # TODO: robotpy-build doesn't wrap global variables at this time
+        # TODO: semiwrap doesn't wrap global variables at this time
         pass
 
     def on_function(self, state: AWNonClassBlockState, fn: Function) -> None:
@@ -587,12 +594,16 @@ def on_class_start(self, state: AWClassBlockState) -> typing.Optional[bool]:
             return False
 
         cls_key, cls_name, cls_namespace, parent_ctx = cls_name_result
-        class_data = self.gendata.get_class_data(cls_key)
+        class_data, missing = self.gendata.get_class_data(cls_key)
 
-        # Ignore explicitly ignored classes
+        # Ignore explicitly ignored classes (including default-ignore)
         if class_data.ignore:
+            self.hctx.ignored_classes.append(cls_key)
             return False
 
+        if missing and not self.report_only:
+            raise ValueError(f"'{cls_key}' must be in {self.gendata.data_fname}")
+
         for typename in class_data.force_type_casters:
             self._add_user_type_caster(typename)
 
@@ -718,6 +729,7 @@ def on_class_start(self, state: AWClassBlockState) -> typing.Optional[bool]:
 
         ctx = ClassContext(
             parent=parent_ctx,
+            yml_id=cls_key,
             namespace=cls_namespace,
             cpp_name=cls_name,
             full_cpp_name=cls_qualname,
@@ -792,10 +804,14 @@ def _process_class_name(
 
         if not isinstance(parent, ClassBlockState):
             # easy case -- namespace is the next user_data up
-            cls_key = cls_name
             cls_namespace = typing.cast(str, parent.user_data)
             if extra_segments:
                 cls_namespace = f"{cls_namespace}::{extra_segments}"
+
+            if cls_namespace:
+                cls_key = f"{cls_namespace}::{cls_name}"
+            else:
+                cls_key = cls_name
         else:
             # Use things the parent already computed
             cdata = typing.cast(ClassStateData, parent.user_data)
@@ -938,7 +954,7 @@ def _on_class_field(
         else:
             py_name = prop_name
 
-        if propdata.access == PropAccess.AUTOMATIC:
+        if propdata.access == PropAccess.auto:
             # const variables can't be written
             if f.constexpr or getattr(f.type, "const", False):
                 prop_readonly = True
@@ -949,7 +965,7 @@ def _on_class_field(
             else:
                 prop_readonly = _is_prop_readonly(f.type)
         else:
-            prop_readonly = propdata.access == PropAccess.READONLY
+            prop_readonly = propdata.access == PropAccess.readonly
 
         doc = self._process_doc(f.doxygen, propdata)
 
@@ -1890,8 +1906,8 @@ def _add_default_arg_cast(
             typename = _fmt_nameonly(ntype.typename)
             if typename:
                 ccfg = self.casters.get(typename)
-                if ccfg and ccfg.get("darg"):
-                    found_typename = ccfg["typename"]
+                if ccfg and ccfg.default_arg_cast:
+                    found_typename = ccfg.typename
                     name = f"({found_typename}){name}"
 
         return name
@@ -1967,7 +1983,7 @@ def _set_type_caster_includes(self):
         for typename in self.types:
             ccfg = casters.get(typename)
             if ccfg:
-                includes.add(ccfg["hdr"])
+                includes.add(ccfg.header)
 
         self.hctx.type_caster_includes = sorted(includes)
 
@@ -1978,7 +1994,7 @@ def parse_header(
     header_root: pathlib.Path,
     gendata: GeneratorData,
     parser_options: ParserOptions,
-    casters: typing.Dict[str, typing.Dict[str, typing.Any]],
+    casters: CastersData,
     report_only: bool,
 ) -> HeaderContext:
     user_cfg = gendata.data
@@ -1986,10 +2002,11 @@ def parse_header(
     # Initialize the header context with user configuration
     hctx = HeaderContext(
         hname=name,
+        orig_yaml=gendata.data_fname,
         extra_includes_first=user_cfg.extra_includes_first,
         extra_includes=user_cfg.extra_includes,
         inline_code=user_cfg.inline_code,
-        rel_fname=str(header_path.relative_to(header_root)),
+        rel_fname=relpath_walk_up(header_path, header_root).as_posix(),
     )
 
     # Parse the header using a custom visitor
@@ -2038,7 +2055,8 @@ def parse_header(
                 break
 
         for param in tmpl_data.params:
-            visitor._add_user_type_caster(param)
+            if isinstance(param, str):
+                visitor._add_user_type_caster(param)
 
     # User typealias additions
     visitor._extract_typealias(user_cfg.typealias, hctx.user_typealias, set())
diff --git a/robotpy_build/autowrap/generator_data.py b/src/semiwrap/autowrap/generator_data.py
similarity index 90%
rename from robotpy_build/autowrap/generator_data.py
rename to src/semiwrap/autowrap/generator_data.py
index 3100c408..baa4e9d9 100644
--- a/robotpy_build/autowrap/generator_data.py
+++ b/src/semiwrap/autowrap/generator_data.py
@@ -6,13 +6,18 @@
     AutowrapConfigYaml,
     PropData,
     FunctionData,
+    OverloadData,
 )
 from .context import OverloadTracker
 
 from cxxheaderparser.types import Function
 
+from validobj.validation import parse_input
+
 import dataclasses
-from typing import Dict, List, Optional, Tuple
+import enum
+import pathlib
+from typing import Any, Dict, List, Optional, Tuple
 
 
 @dataclasses.dataclass
@@ -40,6 +45,23 @@ class ClsReportData:
     functions: FnMissingData = dataclasses.field(default_factory=dict)
 
 
+def _merge_overload(data: FunctionData, overload: OverloadData) -> FunctionData:
+    # merge overload information
+    # - create a dictionary that contains things that haven't changed
+    changes = {"overloads": {}}
+    for f in dataclasses.fields(OverloadData):
+        v = getattr(overload, f.name)
+        if f.default_factory is not dataclasses.MISSING:
+            default = f.default_factory()
+        else:
+            default = f.default
+
+        if v != default:
+            changes[f.name] = v
+
+    return dataclasses.replace(data, **changes)
+
+
 class GeneratorData:
     """
     Used by the hooks to retrieve user-specified generation data, and
@@ -48,8 +70,9 @@ class GeneratorData:
 
     data: AutowrapConfigYaml
 
-    def __init__(self, data: AutowrapConfigYaml):
+    def __init__(self, data: AutowrapConfigYaml, data_fname: pathlib.Path):
         self.data = data
+        self.data_fname = data_fname
 
         default_ignore = self.data.defaults.ignore
         self._default_enum_data = EnumData(ignore=default_ignore)
@@ -64,9 +87,9 @@ def __init__(self, data: AutowrapConfigYaml):
         self.enums: EnumMissingData = {}
         self.attributes: AttrMissingData = {}
 
-    def get_class_data(self, name: str) -> ClassData:
+    def get_class_data(self, name: str) -> Tuple[ClassData, bool]:
         """
-        The 'name' is [parent_class::]class_name
+        The 'name' is namespace::[parent_class::]class_name
         """
         data = self.data.classes.get(name)
         missing = data is None
@@ -74,7 +97,7 @@ def get_class_data(self, name: str) -> ClassData:
             data = self._default_class_data
 
         self.classes[name] = ClsReportData(missing=missing)
-        return data
+        return data, missing
 
     def get_cls_enum_data(
         self, name: str, cls_key: str, cls_data: ClassData
@@ -141,11 +164,7 @@ def get_function_data(
             overload = data.overloads.get(signature)
             missing = overload is None
             if not missing and overload:
-                # merge overload information
-                data = data.dict(exclude_unset=True)
-                del data["overloads"]
-                data.update(overload.dict(exclude_unset=True))
-                data = FunctionData(**data)
+                data = _merge_overload(data, overload)
             report_data.overloads[signature] = is_private or not missing
 
         report_data.tracker.add_overload()
@@ -187,7 +206,7 @@ def get_prop_data(self, name) -> PropData:
 
         return data
 
-    def report_missing(self, name: str, reporter: "MissingReporter"):
+    def report_missing(self, name: pathlib.Path, reporter: "MissingReporter"):
         """
         Generate a structure that can be copy/pasted into the generation
         data yaml and print it out if there's missing data
@@ -341,6 +360,9 @@ class MissingReporter:
     def __init__(self):
         self.reports = {}
 
+    def __bool__(self) -> bool:
+        return len(self.reports) > 0
+
     def _merge(self, src, dst):
         for k, v in src.items():
             if isinstance(v, dict):
diff --git a/robotpy_build/autowrap/mangle.py b/src/semiwrap/autowrap/mangle.py
similarity index 100%
rename from robotpy_build/autowrap/mangle.py
rename to src/semiwrap/autowrap/mangle.py
diff --git a/robotpy_build/autowrap/render_cls_prologue.py b/src/semiwrap/autowrap/render_cls_prologue.py
similarity index 93%
rename from robotpy_build/autowrap/render_cls_prologue.py
rename to src/semiwrap/autowrap/render_cls_prologue.py
index 98d0ce64..4d7f41d1 100644
--- a/robotpy_build/autowrap/render_cls_prologue.py
+++ b/src/semiwrap/autowrap/render_cls_prologue.py
@@ -38,4 +38,4 @@ def render_class_prologue(r: RenderBuffer, hctx: HeaderContext):
         r.writeln(
             f"#define RPYGEN_ENABLE_{cls.full_cpp_name_identifier}_PROTECTED_CONSTRUCTORS"
         )
-        r.writeln(f"#include <rpygen/{cls.full_cpp_name_identifier}.hpp>")
+        r.writeln(f"#include <trampolines/{cls.full_cpp_name_identifier}.hpp>")
diff --git a/robotpy_build/autowrap/render_cls_rpy_include.py b/src/semiwrap/autowrap/render_cls_rpy_include.py
similarity index 96%
rename from robotpy_build/autowrap/render_cls_rpy_include.py
rename to src/semiwrap/autowrap/render_cls_rpy_include.py
index b6694684..3a16e49d 100644
--- a/robotpy_build/autowrap/render_cls_rpy_include.py
+++ b/src/semiwrap/autowrap/render_cls_rpy_include.py
@@ -40,14 +40,15 @@ def render_cls_rpy_include_hpp(ctx: HeaderContext, cls: ClassContext) -> str:
     )
 
     if ctx.extra_includes_first:
-        r.writeln()
+        r.writeln("\n// from extra_includes_first")
         for inc in ctx.extra_includes_first:
             r.writeln(f"#include <{inc}>")
 
-    r.writeln(f"\n#include <{ctx.rel_fname}>")
+    r.writeln("\n// wrapped header")
+    r.writeln(f"#include <{ctx.rel_fname}>")
 
     if ctx.extra_includes:
-        r.writeln()
+        r.writeln("\n// from extra_includes")
         for inc in ctx.extra_includes:
             r.writeln(f"#include <{inc}>")
 
@@ -57,6 +58,11 @@ def render_cls_rpy_include_hpp(ctx: HeaderContext, cls: ClassContext) -> str:
     if cls.template is not None:
         _render_cls_template_impl(r, ctx, cls, cls.template)
 
+    if not cls.template and not cls.trampoline:
+        # TODO: maybe replace this with something that triggers an error
+        #       if the trampoline is used? and a better error message
+        r.writeln(f'#error "{cls.full_cpp_name} does not have a trampoline"')
+
     return r.getvalue()
 
 
@@ -99,7 +105,7 @@ def _render_cls_trampoline(
     if cls.bases:
         r.writeln()
         for base in cls.bases:
-            r.writeln(f"#include <rpygen/{ base.full_cpp_name_identifier }.hpp>")
+            r.writeln(f"#include <trampolines/{ base.full_cpp_name_identifier }.hpp>")
 
     if cls.namespace:
         r.writeln(f"\nnamespace {cls.namespace.strip('::')} {{")
diff --git a/robotpy_build/autowrap/render_pybind11.py b/src/semiwrap/autowrap/render_pybind11.py
similarity index 100%
rename from robotpy_build/autowrap/render_pybind11.py
rename to src/semiwrap/autowrap/render_pybind11.py
diff --git a/robotpy_build/autowrap/render_tmpl_inst.py b/src/semiwrap/autowrap/render_tmpl_inst.py
similarity index 93%
rename from robotpy_build/autowrap/render_tmpl_inst.py
rename to src/semiwrap/autowrap/render_tmpl_inst.py
index 028233ba..96b31d7c 100644
--- a/robotpy_build/autowrap/render_tmpl_inst.py
+++ b/src/semiwrap/autowrap/render_tmpl_inst.py
@@ -10,11 +10,11 @@ def render_template_inst_cpp(
     r = RenderBuffer()
     render_class_prologue(r, hctx)
 
-    tmpl_params = ", ".join(tmpl_data.params)
+    tmpl_params = ", ".join(str(p) for p in tmpl_data.params)
 
     r.write_trim(
         f"""
-        #include <rpygen/{ tmpl_data.header_name }>
+        #include <trampolines/{ tmpl_data.header_name }>
         #include "{ hctx.hname }_tmpl.hpp"
 
         namespace rpygen {{
diff --git a/robotpy_build/autowrap/render_wrapped.py b/src/semiwrap/autowrap/render_wrapped.py
similarity index 100%
rename from robotpy_build/autowrap/render_wrapped.py
rename to src/semiwrap/autowrap/render_wrapped.py
diff --git a/robotpy_build/autowrap/writer.py b/src/semiwrap/autowrap/writer.py
similarity index 75%
rename from robotpy_build/autowrap/writer.py
rename to src/semiwrap/autowrap/writer.py
index 3b06da62..7ef1b825 100644
--- a/robotpy_build/autowrap/writer.py
+++ b/src/semiwrap/autowrap/writer.py
@@ -1,6 +1,6 @@
 import json
 import os
-from os.path import join
+import pathlib
 import pprint
 import typing
 
@@ -18,25 +18,25 @@ def write_files(
         self,
         hctx: HeaderContext,
         name: str,
-        cxx_gen_dir: str,
-        hppoutdir: str,
-        classdeps_json_fname: str,
-    ) -> typing.List[str]:
+        dst_cpp: pathlib.Path,
+        hppoutdir: pathlib.Path,
+        classdeps_json_fname: pathlib.Path,
+    ) -> typing.List[pathlib.Path]:
         """Generates all files needed for a single processed header"""
 
-        generated_sources: typing.List[str] = []
+        generated_sources: typing.List[pathlib.Path] = []
 
         # Jinja requires input as a dictionary
         data = hctx.__dict__
 
         if _emit_j2_debug:
-            with open(join(cxx_gen_dir, f"{name}.txt"), "w") as fp:
+            with open(dst_cpp.parent / f"{name}.txt", "w") as fp:
                 fp.write(pprint.pformat(hctx))
 
         # Write the cpp file first
-        fname = join(cxx_gen_dir, f"{name}.cpp")
-        generated_sources.append(fname)
-        with open(fname, "w", encoding="utf-8") as fp:
+
+        generated_sources.append(dst_cpp)
+        with open(dst_cpp, "w", encoding="utf-8") as fp:
             fp.write(render_wrapped_cpp(hctx))
 
         # Then the json
@@ -49,9 +49,8 @@ def write_files(
             if not cls.template and not cls.trampoline:
                 continue
 
-            fname = join(
-                hppoutdir, f"{cls.namespace.replace(':', '_')}__{cls.cpp_name}.hpp"
-            )
+            fname = hppoutdir / f"{cls.namespace.replace(':', '_')}__{cls.cpp_name}.hpp"
+
             with open(fname, "w", encoding="utf-8") as fp:
                 fp.write(render_cls_rpy_include_hpp(hctx, cls))
 
@@ -59,14 +58,14 @@ def write_files(
         # compiler memory requirements when compiling obnoxious templates
         if hctx.template_instances:
             # Single header output that holds all the struct outlines
-            fname = join(cxx_gen_dir, f"{name}_tmpl.hpp")
+            fname = dst_cpp.parent / f"{name}_tmpl.hpp"
             with open(fname, "w", encoding="utf-8") as fp:
                 fp.write(render_template_inst_hpp(hctx))
 
             # Each cpp file has a single class template instance
             for i, tmpl_data in enumerate(hctx.template_instances):
                 data["tmpl_data"] = tmpl_data
-                fname = join(hppoutdir, f"{name}_tmpl{i+1}.cpp")
+                fname = hppoutdir / f"{name}_tmpl{i+1}.cpp"
                 generated_sources.append(fname)
                 with open(fname, "w", encoding="utf-8") as fp:
                     fp.write(render_template_inst_cpp(hctx, tmpl_data))
diff --git a/src/semiwrap/casters.py b/src/semiwrap/casters.py
new file mode 100644
index 00000000..5c71adc1
--- /dev/null
+++ b/src/semiwrap/casters.py
@@ -0,0 +1,55 @@
+import dataclasses
+import json
+import pathlib
+import typing as T
+
+from .config.util import parse_input
+
+PKGCONF_CASTER_EXT = ".pybind11.json"
+
+#
+# JSON caster data
+#
+
+
+@dataclasses.dataclass
+class TypeCasterJsonHeader:
+    header: str
+    types: T.List[str]
+    default_arg_cast: bool = False
+
+
+#: content of .pybind11.json
+@dataclasses.dataclass
+class TypeCasterJsonData:
+    """
+    Stored in *.pybind11.json
+    """
+
+    headers: T.List[TypeCasterJsonHeader] = dataclasses.field(default_factory=list)
+
+
+def load_typecaster_json_data(fname) -> TypeCasterJsonData:
+    with open(fname) as fp:
+        return parse_input(json.load(fp), TypeCasterJsonData, fname)
+
+
+def save_typecaster_json_data(fname: pathlib.Path, data: TypeCasterJsonData):
+    with open(fname, "w") as fp:
+        json.dump(dataclasses.asdict(data), fp)
+
+
+#
+# Pickled caster lookup as stored by resolve_casters
+#
+
+
+@dataclasses.dataclass
+class TypeData:
+    header: pathlib.Path
+    typename: str
+    default_arg_cast: bool
+
+
+#: content of pickle file used internally
+CastersData = T.Dict[str, TypeData]
diff --git a/src/semiwrap/cmd_creategen.py b/src/semiwrap/cmd_creategen.py
new file mode 100644
index 00000000..0d39a20e
--- /dev/null
+++ b/src/semiwrap/cmd_creategen.py
@@ -0,0 +1,83 @@
+"""
+Create YAML files from parsed header files
+"""
+
+import argparse
+import pathlib
+import sys
+
+from .autowrap.generator_data import MissingReporter
+from .cmd_header2dat import make_argparser, generate_wrapper
+from .makeplan import InputFile, makeplan, BuildTarget
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
+    )
+    parser.add_argument(
+        "--write", action="store_true", help="Write to files if they don't exist"
+    )
+    args = parser.parse_args()
+
+    project_root = pathlib.Path.cwd()
+
+    plan = makeplan(project_root, missing_yaml_ok=True)
+
+    for item in plan:
+        if not isinstance(item, BuildTarget) or item.command != "header2dat":
+            continue
+
+        # convert args to string so we can parse it
+        # .. this is weird, but less annoying than other alternatives
+        #    that I can think of?
+        argv = []
+        for arg in item.args:
+            if isinstance(arg, str):
+                argv.append(arg)
+            elif isinstance(arg, InputFile):
+                argv.append(str(arg.path.absolute()))
+            elif isinstance(arg, pathlib.Path):
+                argv.append(str(arg.absolute()))
+            else:
+                # anything else shouldn't matter
+                argv.append("ignored")
+
+        sparser = make_argparser()
+        sargs = sparser.parse_args(argv)
+
+        reporter = MissingReporter()
+
+        generate_wrapper(
+            name=sargs.name,
+            src_yml=sargs.src_yml,
+            src_h=sargs.src_h,
+            src_h_root=sargs.src_h_root,
+            dst_dat=None,
+            dst_depfile=None,
+            include_paths=sargs.include_paths,
+            casters={},
+            pp_defines=sargs.pp_defines,
+            missing_reporter=reporter,
+            report_only=True,
+        )
+
+        if reporter:
+            for name, report in reporter.as_yaml():
+                report = f"---\n\n{report}"
+
+
+                if args.write:
+                    if not name.exists():
+                        print("Writing", name)
+                        with open(name, "w") as fp:
+                            fp.write(report)
+                    else:
+                        print(name, "already exists!")
+
+                print("===", name, "===")
+                print(report)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_dat2cpp.py b/src/semiwrap/cmd_dat2cpp.py
new file mode 100644
index 00000000..03b00d3b
--- /dev/null
+++ b/src/semiwrap/cmd_dat2cpp.py
@@ -0,0 +1,36 @@
+"""
+Creates an output .cpp file from a .dat file created by parsing a header
+"""
+
+import inspect
+import pathlib
+import pickle
+import sys
+
+from .autowrap.context import HeaderContext
+from .autowrap.render_wrapped import render_wrapped_cpp
+
+
+def _write_wrapper_cpp(input_dat: pathlib.Path, output_cpp: pathlib.Path):
+    with open(input_dat, "rb") as fp:
+        hctx = pickle.load(fp)
+
+    assert isinstance(hctx, HeaderContext)
+
+    content = render_wrapped_cpp(hctx)
+    with open(output_cpp, "w", encoding="utf-8") as fp:
+        fp.write(content)
+
+
+def main():
+    try:
+        _, input_dat, output_cpp = sys.argv
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    _write_wrapper_cpp(pathlib.Path(input_dat), pathlib.Path(output_cpp))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_dat2tmplcpp.py b/src/semiwrap/cmd_dat2tmplcpp.py
new file mode 100644
index 00000000..418e61bc
--- /dev/null
+++ b/src/semiwrap/cmd_dat2tmplcpp.py
@@ -0,0 +1,41 @@
+"""
+Creates a template instance .cpp file from a .dat file created by parsing a header
+"""
+
+import inspect
+import pathlib
+import pickle
+import sys
+
+from .autowrap.context import HeaderContext
+from .autowrap.render_tmpl_inst import render_template_inst_cpp
+
+
+def _write_wrapper_cpp(input_dat: pathlib.Path, py_name: str, output_cpp: pathlib.Path):
+    with open(input_dat, "rb") as fp:
+        hctx = pickle.load(fp)
+
+    assert isinstance(hctx, HeaderContext)
+
+    for tmpl in hctx.template_instances:
+        if tmpl.py_name == py_name:
+            break
+    else:
+        raise ValueError(f"internal error: cannot find {py_name} in {hctx.orig_yaml}")
+
+    content = render_template_inst_cpp(hctx, tmpl)
+    output_cpp.write_text(content)
+
+
+def main():
+    try:
+        _, input_dat, py_name, output_cpp = sys.argv
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    _write_wrapper_cpp(pathlib.Path(input_dat), py_name, pathlib.Path(output_cpp))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_dat2tmplhpp.py b/src/semiwrap/cmd_dat2tmplhpp.py
new file mode 100644
index 00000000..ebfa849f
--- /dev/null
+++ b/src/semiwrap/cmd_dat2tmplhpp.py
@@ -0,0 +1,33 @@
+""" """
+
+import inspect
+import pathlib
+import pickle
+import sys
+
+from .autowrap.context import HeaderContext
+from .autowrap.render_tmpl_inst import render_template_inst_hpp
+
+
+def _write_tmpl_hpp(input_dat: pathlib.Path, output_hpp: pathlib.Path):
+    with open(input_dat, "rb") as fp:
+        hctx = pickle.load(fp)
+
+    assert isinstance(hctx, HeaderContext)
+
+    content = render_template_inst_hpp(hctx)
+    output_hpp.write_text(content)
+
+
+def main():
+    try:
+        _, input_dat, output_hpp = sys.argv
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    _write_tmpl_hpp(pathlib.Path(input_dat), pathlib.Path(output_hpp))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_dat2trampoline.py b/src/semiwrap/cmd_dat2trampoline.py
new file mode 100644
index 00000000..1e23e6c8
--- /dev/null
+++ b/src/semiwrap/cmd_dat2trampoline.py
@@ -0,0 +1,65 @@
+"""
+Creates an output .hpp file from a .dat file created by parsing a header
+"""
+
+import inspect
+import pathlib
+import pickle
+import sys
+
+from .autowrap.context import HeaderContext, ClassContext
+from .autowrap.render_cls_rpy_include import render_cls_rpy_include_hpp
+
+
+def _get_classes(hctx: HeaderContext):
+    def _get_child_classes(c: ClassContext):
+        for c in c.child_classes:
+            yield c
+            yield from _get_child_classes(c)
+
+    for cls in hctx.classes:
+        yield cls
+        yield from _get_child_classes(cls)
+
+
+def _write_wrapper_cpp(input_dat: pathlib.Path, yml_id: str, output_hpp: pathlib.Path):
+    with open(input_dat, "rb") as fp:
+        hctx = pickle.load(fp)
+
+    assert isinstance(hctx, HeaderContext)
+
+    avail = []
+    for cls in _get_classes(hctx):
+        avail.append(cls.yml_id)
+        if cls.yml_id == yml_id:
+            break
+    else:
+        msg = [
+            f"cannot find {yml_id} in {hctx.rel_fname}",
+            f"- config: {hctx.orig_yaml}",
+        ]
+
+        if avail:
+            msg.append("- found " + ", ".join(avail))
+
+        if hctx.ignored_classes:
+            msg.append("- ignored " + ", ".join(hctx.ignored_classes))
+
+        raise ValueError("\n".join(msg))
+
+    content = render_cls_rpy_include_hpp(hctx, cls)
+    output_hpp.write_text(content)
+
+
+def main():
+    try:
+        _, input_dat, yml_id, output_hpp = sys.argv
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    _write_wrapper_cpp(pathlib.Path(input_dat), yml_id, pathlib.Path(output_hpp))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_gen_libinit.py b/src/semiwrap/cmd_gen_libinit.py
new file mode 100644
index 00000000..ebef05d8
--- /dev/null
+++ b/src/semiwrap/cmd_gen_libinit.py
@@ -0,0 +1,145 @@
+"""
+Generates a file that loads shared library dependencies for the module
+
+Arguments: pyproject.toml package.name /path/to/_init_name.py
+"""
+
+from __future__ import annotations
+
+import inspect
+import pathlib
+import platform
+import sys
+import typing as T
+
+from .autowrap.buffer import RenderBuffer
+
+platform_sys = platform.system()
+is_windows = platform_sys == "Windows"
+is_macos = platform_sys == "Darwin"
+
+
+# TODO: this belongs in a separate script/api that can be used from multiple tools
+# .. maybe in dynamic-library
+def _write_libinit_py(
+    # project: PyProject,
+    init_py: pathlib.Path,
+    modules: T.List[str],
+    # libs: T.List[pathlib.Path],
+    # requires: T.List[str],
+):
+    """
+    :param init_py: the _init module for the library(ies) that is written out
+    :param libs: for each library that is being initialized, this is the
+                 path to that library
+
+    :param requires: other pkgconf packages that these libraries depend on.
+                     Their init_py will be looked up and imported first.
+    """
+
+    # platform = project.platform
+    r = RenderBuffer()
+
+    r.writeln("# This file is automatically generated, DO NOT EDIT")
+    r.writeln("# fmt: off")
+    r.writeln()
+
+    for mod in modules:
+        r.writeln(f"import {mod}")
+
+    need_space = False
+    # for req in requires:
+    #     module = project.pkgconf.get(req).libinit_py
+    #     if module:
+    #         r.writeln(f"import {module}")
+    #         need_space = True
+
+    # if need_space:
+    # r.writeln()
+
+    # TODO: don't need this?
+
+    # if libs:
+    #     r.writeln("def __load_library():")
+
+    #     with r.indent(4):
+
+    #         r.writeln("from os.path import abspath, join, dirname, exists")
+    #         if platform.os == "osx":
+    #             r.writeln("from ctypes import CDLL, RTLD_GLOBAL")
+    #         else:
+    #             r.writeln("from ctypes import cdll")
+
+    #         r.writeln()
+
+    #         r.writeln("root = abspath(dirname(__file__))")
+    #         r.writeln("libs = []")
+
+    #         for lib in libs:
+    #             rel = lib.relative_to(init_py.parent)
+    #             components = ", ".join(map(repr, rel.parts))
+
+    #             r.writeln(f"lib_path = join(root, {components})")
+
+    #             r.writeln("try:")
+    #             with r.indent(4):
+    #                 if platform.os == "osx":
+    #                     r.writeln(f"libs += [CDLL(lib_path, mode=RTLD_GLOBAL)]")
+    #                 else:
+    #                     r.writeln("libs += [cdll.LoadLibrary(lib_path)]")
+
+    #             r.writeln("except FileNotFoundError:")
+    #             with r.indent(4):
+    #                 r.writeln("if not exists(lib_path):")
+    #                 r.writeln(
+    #                     f'    raise FileNotFoundError("{lib.name} was not found on your system. Is this package correctly installed?")'
+    #                 )
+    #             if platform.os == "windows":
+    #                 r.writeln(
+    #                     f'raise Exception("{lib.name} could not be loaded. Do you have Visual Studio C++ Redistributible 2019 installed?")'
+    #                 )
+    #             else:
+    #                 r.writeln(
+    #                     f'raise FileNotFoundError("{lib.name} could not be loaded. There is a missing dependency.")'
+    #                 )
+
+    #             r.writeln()
+
+    #         r.writeln("return libs")
+
+    #     r.writeln()
+    #     r.writeln("__libs = __load_library()")
+    #     r.writeln()
+
+    init_py.write_text(r.getvalue())
+
+
+def main():
+    try:
+        _, libinit_py = sys.argv[:2]
+        modules = sys.argv[2:]
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    # pyproject = PyProject(pathlib.Path(pyproject_toml))
+
+    # module = pyproject.get_extension(package_name)
+    # depends = pyproject.get_extension_deps(module)
+
+    # TODO: libs
+    # .. oh, those were going to get created by the downloader hatch plugin
+    #    so we don't need them? maybe
+    # libs = []
+
+    # need os, dependencies, section name
+    _write_libinit_py(
+        # pyproject,
+        pathlib.Path(libinit_py),
+        modules,
+        # depends,
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_gen_modinit_hpp.py b/src/semiwrap/cmd_gen_modinit_hpp.py
new file mode 100644
index 00000000..83d4a020
--- /dev/null
+++ b/src/semiwrap/cmd_gen_modinit_hpp.py
@@ -0,0 +1,114 @@
+"""
+Usage: module_name output_hpp input_dat [input_dat...]
+
+Generates a header file that contains initialization functions for pybind11 bindings
+
+You must include the header "autogen_module_init.hpp", and call initWrapper() from
+your pybind11 module declaration.
+"""
+
+import pathlib
+import pickle
+import sys
+import typing as T
+
+import toposort
+
+from .autowrap.buffer import RenderBuffer
+from .autowrap.context import HeaderContext
+
+
+def _write_wrapper_hpp(
+    module_name: str, output_hpp: pathlib.Path, *input_dat: pathlib.Path
+):
+    # Need to ensure that wrapper initialization is called in base order
+    # so we have to toposort it here based on the class hierarchy determined
+    # while parsing the header
+    types2name = {}
+    types2deps = {}
+    ordering = []
+
+    for datfile in input_dat:
+        with open(datfile, "rb") as fp:
+            hctx = pickle.load(fp)
+
+        assert isinstance(hctx, HeaderContext)
+
+        name = hctx.hname
+        dep = hctx.class_hierarchy
+
+        # make sure objects without classes are also included!
+        if not dep:
+            ordering.append(name)
+
+        for clsname, bases in dep.items():
+            if clsname in types2name:
+                raise ValueError(f"{name} ({datfile}): duplicate class {clsname}")
+            types2name[clsname] = name
+            types2deps[clsname] = bases[:]
+
+    to_sort: T.Dict[str, T.Set[str]] = {}
+    for clsname, bases in types2deps.items():
+        clsname = types2name[clsname]
+        deps = to_sort.setdefault(clsname, set())
+        for base in bases:
+            base = types2name.get(base)
+            if base and base != clsname:
+                deps.add(base)
+
+    ordering.extend(toposort.toposort_flatten(to_sort, sort=True))
+
+    r = RenderBuffer()
+    r.writeln("// This file is autogenerated, DO NOT EDIT")
+    r.writeln("")
+    r.write_trim(
+        f"""
+        #pragma once
+        #include <robotpy_build.h>
+
+        // Use this to define your module instead of PYBIND11_MODULE
+        #define SEMIWRAP_PYBIND11_MODULE(variable) PYBIND11_MODULE({module_name}, variable)
+
+        // TODO: namespace semiwrap::autogen {{
+
+        // forward declarations
+        """
+    )
+    for name in ordering:
+        r.write_trim(
+            f"""
+                void begin_init_{name}(py::module &m);
+                void finish_init_{name}();
+            """
+        )
+        r.writeln()
+
+    r.writeln("static void initWrapper(py::module &m) {")
+    with r.indent():
+        for name in ordering:
+            r.writeln(f"begin_init_{name}(m);")
+        r.writeln()
+        for name in ordering:
+            r.writeln(f"finish_init_{name}();")
+
+    r.writeln("}")
+
+    output_hpp.write_text(r.getvalue())
+
+
+def main():
+    try:
+        module_name = sys.argv[1]
+        output_hpp = sys.argv[2]
+        inputs = sys.argv[3:]
+    except Exception:
+        print(__doc__, file=sys.stderr)
+        sys.exit(1)
+
+    _write_wrapper_hpp(
+        module_name, pathlib.Path(output_hpp), *[pathlib.Path(i) for i in inputs]
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_gen_pkgconf.py b/src/semiwrap/cmd_gen_pkgconf.py
new file mode 100644
index 00000000..6977adfe
--- /dev/null
+++ b/src/semiwrap/cmd_gen_pkgconf.py
@@ -0,0 +1,46 @@
+"""
+Generates a pkg-config file for this extension
+"""
+
+import argparse
+import inspect
+import pathlib
+
+from .mkpc import make_pc_file
+from .pyproject import PyProject
+
+
+def main():
+    parser = argparse.ArgumentParser(usage=inspect.cleandoc(__doc__ or ""))
+    parser.add_argument("module_package_name")
+    parser.add_argument("name")
+    parser.add_argument("pyproject_toml", type=pathlib.Path)
+    parser.add_argument("pcfile", type=pathlib.Path)
+    parser.add_argument("--libinit-py")
+    args = parser.parse_args()
+
+    module_package_name = args.module_package_name
+    project = PyProject(args.pyproject_toml)
+
+    module = project.get_extension(module_package_name)
+    depends = project.get_extension_deps(module)
+
+    pc_install_path = project.package_root / pathlib.Path(
+        *module_package_name.split(".")[:-1]
+    )
+    make_pc_file(
+        project_root=project.root,
+        pcfile=args.pcfile,
+        pc_install_path=pc_install_path,
+        name=args.name,
+        desc="semiwrap pybind11 module",
+        version="",
+        includes=module.includes,
+        depends=depends,
+        libinit_py=args.libinit_py,
+        generator_name="semiwrap.cmd_gen_pkgconf",
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_genmeson.py b/src/semiwrap/cmd_genmeson.py
new file mode 100644
index 00000000..f2c7fb66
--- /dev/null
+++ b/src/semiwrap/cmd_genmeson.py
@@ -0,0 +1,455 @@
+from __future__ import annotations
+
+import codecs
+import os
+import os.path
+import pathlib
+import re
+import sys
+import typing as T
+
+from .autowrap.buffer import RenderBuffer
+from .makeplan import (
+    Depfile,
+    Entrypoint,
+    InputFile,
+    OutputFile,
+    BuildTarget,
+    BuildTargetOutput,
+    ExtensionModule,
+    LocalDependency,
+    CppMacroValue,
+    makeplan,
+)
+from .util import maybe_write_file, relpath_walk_up
+
+# String escaping stolen from meson source code, Apache 2.0 license
+# This is the regex for the supported escape sequences of a regular string
+# literal, like 'abc\x00'
+ESCAPE_SEQUENCE_SINGLE_RE = re.compile(
+    r"""
+    ( \\U[A-Fa-f0-9]{8}   # 8-digit hex escapes
+    | \\u[A-Fa-f0-9]{4}   # 4-digit hex escapes
+    | \\x[A-Fa-f0-9]{2}   # 2-digit hex escapes
+    | \\[0-7]{1,3}        # Octal escapes
+    | \\N\{[^}]+\}        # Unicode characters by name
+    | \\[\\'abfnrtv]      # Single-character escapes
+    )""",
+    re.UNICODE | re.VERBOSE,
+)
+
+
+def _decode_match(match: T.Match[str]) -> str:
+    return codecs.decode(match.group(0).encode(), "unicode_escape")
+
+
+def _make_string(s: str):
+    s = ESCAPE_SEQUENCE_SINGLE_RE.sub(_decode_match, s)
+    return f"'{s}'"
+
+
+VarTypes = T.Union[
+    InputFile, OutputFile, BuildTarget, ExtensionModule, LocalDependency, CppMacroValue
+]
+
+
+class VarCache:
+    def __init__(self) -> None:
+        self.cache: T.Dict[VarTypes, str] = {}
+
+        # this might get annoying to debug, but for now this is easier...
+        self.idx = 1
+
+    def getvar(self, item: VarTypes) -> str:
+        var = self.cache.get(item)
+        if var is None:
+
+            # TODO: probably should name variables according to their target type
+            # to make it easier to debug
+
+            if isinstance(item, InputFile):
+                # .. this probably isn't right either, what should this be relative to?
+                # .. TODO should use files()? but maybe only if this is actually a variable
+                return _make_string(item.path.resolve().as_posix())
+                # var = f"sw_in_{self.idx}"
+            elif isinstance(item, OutputFile):
+                return _make_string(item.name)
+                # var = f"sw_out_{self.idx}"
+            elif isinstance(item, BuildTarget):
+                var = f"_sw_target_{self.idx}"
+            elif isinstance(item, ExtensionModule):
+                name = item.name.replace("-", "_")
+                var = f"{name}_module"
+            elif isinstance(item, LocalDependency):
+                name = item.name.replace("-", "_")
+                var = f"{name}_dep"
+            elif isinstance(item, CppMacroValue):
+                name = item.name
+                var = f"_sw_cpp_var_{name}"
+            else:
+                assert False
+
+            self.idx += 1
+            self.cache[item] = var
+        return var
+
+
+def _render_build_target(r: RenderBuffer, vc: VarCache, bt: BuildTarget):
+    outvar = vc.getvar(bt)
+
+    bt_cmd = bt.command.replace("-", "_")
+    cmd = [f"_sw_cmd_{bt_cmd}"]
+    tinput = []
+    toutput = []
+    depfile = None
+
+    for arg in bt.args:
+        if isinstance(arg, str):
+            cmd.append(_make_string(arg))
+        elif isinstance(arg, pathlib.Path):
+            cmd.append(_make_string(arg.resolve().as_posix()))
+        elif isinstance(arg, (BuildTarget, InputFile)):
+            cmd.append(f"'@INPUT{len(tinput)}@'")
+            tinput.append(vc.getvar(arg))
+        elif isinstance(arg, BuildTargetOutput):
+            cmd.append(f"'@INPUT{len(tinput)}@'")
+            tinput.append(f"{vc.getvar(arg.target)}[{arg.output_index}]")
+        elif isinstance(arg, OutputFile):
+            cmd.append(f"'@OUTPUT{len(toutput)}@'")
+            toutput.append(vc.getvar(arg))
+        elif isinstance(arg, Depfile):
+            assert depfile is None, bt
+            cmd.append("'@DEPFILE@'")
+            depfile = _make_string(arg.name)
+        elif isinstance(arg, ExtensionModule):
+            cmd.append(f"'@INPUT{len(tinput)}@'")
+            tinput.append(vc.getvar(arg))
+        elif isinstance(arg, CppMacroValue):
+            cmd.append(vc.getvar(arg))
+        else:
+            assert False, f"unexpected {arg!r} in {bt}"
+
+    r.writeln(f"{outvar} = custom_target(")
+    with r.indent(2):
+        _render_meson_args(r, "command", cmd)
+        if tinput:
+            _render_meson_args(r, "input", tinput)
+        if toutput:
+            _render_meson_args(r, "output", toutput)
+
+        if depfile:
+            r.writeln(f"depfile: {depfile},")
+
+        if bt.install_path is not None:
+            install_path = _make_string(bt.install_path.as_posix())
+            r.writeln(
+                f"install_dir: sw_py.get_install_dir(pure: false) / {install_path},"
+            )
+            r.writeln("install: true,")
+
+    r.writeln(")")
+
+
+def _render_include_directories(
+    r: RenderBuffer,
+    incs: T.Sequence[pathlib.Path],
+    meson_build_path: T.Optional[pathlib.Path],
+):
+    # meson wants these to be relative to meson.build
+    # - only can do that if we're writing an output file
+    if meson_build_path:
+        meson_build_parent = meson_build_path.parent
+        incs = [relpath_walk_up(p, meson_build_parent) for p in incs]
+
+    _render_meson_args(
+        r, "include_directories", [_make_string(inc.as_posix()) for inc in incs]
+    )
+
+
+def _render_meson_args(r: RenderBuffer, name: str, args: T.List[str]):
+    r.writeln(f"{name}: [")
+    with r.indent():
+        for arg in args:
+            r.writeln(f"{arg},")
+    r.writeln("],")
+
+
+def _render_module_stage0(
+    r: RenderBuffer,
+    vc: VarCache,
+    m: ExtensionModule,
+    meson_build_path: T.Optional[pathlib.Path],
+):
+
+    # variables generated here should be deterministic so that users can add
+    # their own things to it, or use it directly
+
+    r.writeln(f"# {m.package_name}")
+    r.writeln(f"{m.name}_sources = []")
+    r.writeln(f"{m.name}_deps = [declare_dependency(")
+    with r.indent():
+
+        if m.sources:
+            r.writeln("sources: [")
+            with r.indent():
+                for src in m.sources:
+                    r.writeln(f"{vc.getvar(src)},")
+
+            r.writeln("],")
+
+        if m.depends:
+            depnames = []
+            for d in m.depends:
+                if isinstance(d, LocalDependency):
+                    depnames.append(vc.getvar(d))
+                else:
+                    depnames.append(f"dependency({_make_string(d)})")
+
+            deps = ", ".join(depnames)
+            r.writeln(f"dependencies: [{deps}],")
+
+        if m.include_directories:
+            _render_include_directories(r, m.include_directories, meson_build_path)
+
+    r.writeln(")]")
+
+
+def _render_module_stage1(
+    r: RenderBuffer,
+    vc: VarCache,
+    m: ExtensionModule,
+    meson_build_path: T.Optional[pathlib.Path],
+):
+
+    # variables generated here should be deterministic so that users can
+    # use it directly if they wish
+
+    subdir = _make_string(m.install_path.as_posix())
+    module_name = _make_string(m.package_name.split(".")[-1])
+    mvar = vc.getvar(m)
+
+    r.writeln(f"# {m.package_name}")
+    r.writeln(f"{mvar} = sw_py.extension_module(")
+    with r.indent():
+        r.write_trim(
+            f"""
+            {module_name},
+            sources: [{m.name}_sources],
+            dependencies: [{m.name}_deps],
+            install: true,
+            subdir: {subdir},
+        """
+        )
+
+        if m.include_directories:
+            _render_include_directories(r, m.include_directories, meson_build_path)
+
+    r.writeln(")")
+    r.writeln()
+
+
+def gen_meson(
+    project_root: pathlib.Path,
+    stage0_path: T.Optional[pathlib.Path],
+    stage1_path: T.Optional[pathlib.Path],
+    trampolines_path: T.Optional[pathlib.Path],
+) -> T.Tuple[str, str, str, T.List[Entrypoint]]:
+    """
+    Returns the contents of two meson.build files that build on each other, and
+    any entry points that need to be created
+    """
+
+    eps: T.List[Entrypoint] = []
+
+    r0 = RenderBuffer()
+    r1 = RenderBuffer()
+    t = RenderBuffer()
+    vc = VarCache()
+
+    # standard boilerplate here
+    r0.write_trim(
+        """
+        # This file is automatically generated, DO NOT EDIT
+        #
+        # The generator's stable API includes variables that do not start with
+        # an underscore. Any variables with an underscore may change in the future
+        # without warning
+        #
+                 
+        sw_py = import('python').find_installation()
+        
+        # internal commands for the autowrap machinery
+        _sw_cmd_gen_libinit_py = [sw_py, '-m', 'semiwrap.cmd_gen_libinit']
+        _sw_cmd_gen_pkgconf = [sw_py, '-m', 'semiwrap.cmd_gen_pkgconf']
+        _sw_cmd_publish_casters = [sw_py, '-m', 'semiwrap.cmd_publish_casters']
+        _sw_cmd_resolve_casters = [sw_py, '-m', 'semiwrap.cmd_resolve_casters']
+        _sw_cmd_header2dat = [sw_py, '-m', 'semiwrap.cmd_header2dat']
+        _sw_cmd_dat2cpp = [sw_py, '-m', 'semiwrap.cmd_dat2cpp']
+        _sw_cmd_dat2trampoline = [sw_py, '-m', 'semiwrap.cmd_dat2trampoline']
+        _sw_cmd_dat2tmplcpp = [sw_py, '-m', 'semiwrap.cmd_dat2tmplcpp']
+        _sw_cmd_dat2tmplhpp = [sw_py, '-m', 'semiwrap.cmd_dat2tmplhpp']
+        _sw_cmd_gen_modinit_hpp = [sw_py, '-m', 'semiwrap.cmd_gen_modinit_hpp']
+        _sw_cmd_make_pyi = [sw_py, '-m', 'semiwrap.cmd_make_pyi']
+
+        #
+        # internal custom targets for generating wrappers
+        #
+    """
+    )
+    r0.writeln()
+
+    # ... so, we could write out proper loops and stuff to make the output
+    #     meson file easier to read, but I think expanded custom targets
+    #     is simpler to generate
+
+    plan = makeplan(pathlib.Path(project_root))
+    macros: T.List[CppMacroValue] = []
+    build_targets: T.List[BuildTarget] = []
+    modules: T.List[ExtensionModule] = []
+    pyi_targets: T.List[BuildTarget] = []
+    local_deps: T.List[LocalDependency] = []
+    trampoline_targets: T.List[BuildTarget] = []
+
+    for item in plan:
+        if isinstance(item, BuildTarget):
+            if item.command == "make-pyi":
+                # defer these to the end
+                pyi_targets.append(item)
+            elif item.command == "dat2trampoline":
+                trampoline_targets.append(item)
+            else:
+                build_targets.append(item)
+        elif isinstance(item, ExtensionModule):
+            # defer these to the end
+            modules.append(item)
+        elif isinstance(item, Entrypoint):
+            eps.append(item)
+        elif isinstance(item, LocalDependency):
+            local_deps.append(item)
+        elif isinstance(item, CppMacroValue):
+            macros.append(item)
+        else:
+            assert False
+
+    if macros:
+        for macro in macros:
+            macro_name = _make_string(macro.name)
+            r0.writeln(
+                f"{vc.getvar(macro)} = meson.get_compiler('cpp').get_define({macro_name})"
+            )
+        r0.writeln()
+
+    for target in build_targets:
+        _render_build_target(r0, vc, target)
+
+    if local_deps:
+        r0.writeln()
+        r0.write_trim(
+            """
+            #
+            # Local dependencies
+            #
+        """
+        )
+        for ld in local_deps:
+            r0.writeln(f"{vc.getvar(ld)} = declare_dependency(")
+            with r0.indent():
+                if ld.depends:
+                    deps = []
+                    for dep in ld.depends:
+                        if isinstance(dep, LocalDependency):
+                            deps.append(vc.getvar(dep))
+                        else:
+                            deps.append(f"dependency({_make_string(dep)})")
+
+                    depstrs = ", ".join(deps)
+                    r0.writeln(f"dependencies: [{depstrs}],")
+
+                if ld.include_paths:
+                    _render_include_directories(r0, ld.include_paths, stage0_path)
+
+            r0.writeln(")")
+
+    if trampoline_targets:
+        t.writeln("# This file is automatically generated, DO NOT EDIT\n\n")
+        for target in trampoline_targets:
+            _render_build_target(t, vc, target)
+
+        r0.writeln()
+        r0.writeln("subdir('trampolines')")
+
+    if modules:
+        r0.writeln()
+        r0.write_trim(
+            """
+            #
+            # Module configurations
+            #
+        """
+        )
+        r0.writeln()
+
+        r1.writeln("# This file is automatically generated, DO NOT EDIT\n\n")
+
+        for module in modules:
+            _render_module_stage0(r0, vc, module, stage0_path)
+            _render_module_stage1(r1, vc, module, stage1_path)
+
+        # TODO: this conditional probably should be done in meson instead
+        # cannot build pyi files when cross-compiling
+        if not (
+            "_PYTHON_HOST_PLATFORM" in os.environ
+            or "PYTHON_CROSSENV" in os.environ
+            or os.environ.get("RPYBUILD_SKIP_PYI") == "1"
+        ):
+            for target in pyi_targets:
+                _render_build_target(r1, vc, target)
+
+    return r0.getvalue(), r1.getvalue(), t.getvalue(), eps
+
+
+def gen_meson_to_file(
+    project_root: pathlib.Path,
+    stage0: pathlib.Path,
+    stage1: pathlib.Path,
+    trampolines: pathlib.Path,
+) -> T.List[Entrypoint]:
+
+    # because of https://github.com/mesonbuild/meson/issues/2320
+    assert trampolines.parent.parent == stage0.parent
+
+    s0_content, s1_content, t_content, eps = gen_meson(
+        pathlib.Path(project_root), stage0, stage1, trampolines
+    )
+
+    maybe_write_file(stage0, s0_content, encoding="utf-8")
+    maybe_write_file(stage1, s1_content, encoding="utf-8")
+    maybe_write_file(trampolines, t_content, encoding="utf-8")
+
+    return eps
+
+
+def main():
+    def _usage() -> T.NoReturn:
+        print(f"{sys.argv[0]} project_root", file=sys.stderr)
+        sys.exit(1)
+
+    # this entrypoint only for debugging
+    try:
+        _, project_root = sys.argv
+    except ValueError:
+        _usage()
+
+    s0_content, s1_content, t_content, _ = gen_meson(
+        pathlib.Path(project_root), None, None, None
+    )
+
+    print(s0_content)
+    print("\n---\n")
+    print(s1_content)
+    print("\n---\n")
+    print(t_content)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_header2dat.py b/src/semiwrap/cmd_header2dat.py
new file mode 100644
index 00000000..58c932c3
--- /dev/null
+++ b/src/semiwrap/cmd_header2dat.py
@@ -0,0 +1,143 @@
+"""
+Parses a header file and writes an intermediate dat file that other tools
+can turn into other things
+"""
+
+import argparse
+import inspect
+import os
+import json
+import pathlib
+import pickle
+import typing
+
+from cxxheaderparser.options import ParserOptions
+from cxxheaderparser import preprocessor
+
+from .autowrap.cxxparser import parse_header
+from .autowrap.generator_data import GeneratorData, MissingReporter
+from .config.autowrap_yml import AutowrapConfigYaml
+
+from .cmd_resolve_casters import CastersData
+
+# TODO: eventually provide native preprocessor by default and allow it
+#       to be enabled/disabled per-file just in case
+# TODO: should this also be a command line option?
+if os.getenv("RPYBUILD_PP_GCC") == "1":
+    # GCC preprocessor can be 10x faster than pcpp for very complex files
+    def make_preprocessor(*args, **kwargs):
+        return preprocessor.make_gcc_preprocessor(print_cmd=False, *args, **kwargs)
+
+else:
+    make_preprocessor = preprocessor.make_pcpp_preprocessor
+
+
+def generate_wrapper(
+    *,
+    name: str,
+    src_yml: pathlib.Path,
+    src_h: pathlib.Path,
+    src_h_root: pathlib.Path,
+    include_paths: typing.List[pathlib.Path],
+    pp_defines: typing.List[str],
+    casters: CastersData,
+    dst_dat: typing.Optional[pathlib.Path],
+    dst_depfile: typing.Optional[pathlib.Path],
+    missing_reporter: MissingReporter,
+    report_only: bool,
+):
+
+    try:
+        # semiwrap requires user to create yaml files first using create-gen
+        data = AutowrapConfigYaml.from_file(src_yml)
+    except FileNotFoundError:
+        if not report_only:
+            raise
+
+        print("WARNING: could not find", src_yml)
+        data = AutowrapConfigYaml()
+
+    deptarget = None
+    if dst_depfile is not None:
+        assert dst_dat is not None
+        deptarget = [str(dst_dat)]
+
+    popts = ParserOptions(
+        preprocessor=make_preprocessor(
+            defines=pp_defines,
+            include_paths=include_paths,
+            encoding=data.encoding,
+            depfile=dst_depfile,
+            deptarget=deptarget,
+        )
+    )
+
+    gendata = GeneratorData(data, src_yml)
+
+    try:
+        hctx = parse_header(
+            name,
+            src_h,
+            src_h_root,
+            gendata,
+            popts,
+            casters,
+            report_only,
+        )
+    except Exception as e:
+        raise ValueError(f"processing {src_h}") from e
+
+    gendata.report_missing(src_yml, missing_reporter)
+
+    if not report_only and missing_reporter and not data.defaults.ignore:
+        print("WARNING: some items not in", src_yml, "for", src_h)
+        for _, contents in missing_reporter.as_yaml():
+            print(contents)
+
+    if dst_dat is not None:
+        with open(dst_dat, "wb") as fp:
+            pickle.dump(hctx, fp)
+
+
+def make_argparser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("-I", "--include-paths", action="append", default=[])
+    parser.add_argument("-D", "--pp-defines", action="append", default=[])
+    parser.add_argument("--cpp")
+    parser.add_argument("name")
+    parser.add_argument("src_yml", type=pathlib.Path)
+    parser.add_argument("src_h", type=pathlib.Path)
+    parser.add_argument("src_h_root", type=pathlib.Path)
+    parser.add_argument("in_casters", type=pathlib.Path)
+    parser.add_argument("dst_dat", type=pathlib.Path)
+    parser.add_argument("dst_depfile", type=pathlib.Path)
+    return parser
+
+
+def main():
+    parser = make_argparser()
+    args = parser.parse_args()
+
+    with open(args.in_casters, "rb") as fp:
+        casters = pickle.load(fp)
+
+    if args.cpp:
+        args.pp_defines.append(f"__cplusplus {args.cpp}")
+
+    generate_wrapper(
+        name=args.name,
+        src_yml=args.src_yml,
+        src_h=args.src_h,
+        src_h_root=args.src_h_root,
+        dst_dat=args.dst_dat,
+        dst_depfile=args.dst_depfile,
+        include_paths=args.include_paths,
+        casters=casters,
+        pp_defines=args.pp_defines,
+        missing_reporter=MissingReporter(),
+        report_only=False,
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_make_pyi.py b/src/semiwrap/cmd_make_pyi.py
new file mode 100644
index 00000000..076b3492
--- /dev/null
+++ b/src/semiwrap/cmd_make_pyi.py
@@ -0,0 +1,128 @@
+"""
+Creates an output .pyi file from a given python module.
+
+Arguments are:
+    package_name input_pyi output_pyi [input_pyi output_pyi...] -- package mapped_file [package mapped_file...]
+"""
+
+import importlib.util
+import inspect
+import os
+from os.path import dirname, join
+import pathlib
+import shutil
+import sys
+import tempfile
+import typing as T
+
+import pybind11_stubgen
+
+
+class _PackageFinder:
+    """
+    Custom loader to allow loading built modules from their location
+    in the build directory (as opposed to their install location)
+    """
+
+    # Set this to mapping returned from _BuiltEnv.setup_built_env
+    mapping: T.Dict[str, str] = {}
+
+    @classmethod
+    def find_spec(cls, fullname, path, target=None):
+        m = cls.mapping.get(fullname)
+        if m:
+            return importlib.util.spec_from_file_location(fullname, m)
+
+
+def _write_pyi(package_name, generated_pyi: T.Dict[pathlib.PurePath, pathlib.Path]):
+
+    # We can't control where stubgen writes files, so tell it to output
+    # to a temporary directory and then we copy the files from there to
+    # our desired location
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmpdir_pth = pathlib.Path(tmpdir)
+
+        # Call pybind11-stubgen
+        sys.argv = [
+            "<dummy>",
+            "--exit-code",
+            "--ignore-invalid-expressions=<.*>",
+            "--root-suffix=",
+            "-o",
+            tmpdir,
+            package_name,
+        ]
+
+        os.system(f"find {tmpdir_pth}")
+
+        # Create the parent directories in the temporary directory
+        for infile in generated_pyi.keys():
+            (tmpdir_pth / infile).parent.mkdir(parents=True, exist_ok=True)
+
+        pybind11_stubgen.main()
+
+        print("=" * 80, file=sys.stderr)
+        os.system(f"find {tmpdir_pth}")
+
+        # stubgen doesn't take a direct output filename, so move the file
+        # to our desired location
+        for infile, output in generated_pyi.items():
+            output.unlink(missing_ok=True)
+            shutil.move(tmpdir_pth / infile, output)
+
+
+def main():
+
+    generated_pyi: T.Dict[pathlib.PurePosixPath, pathlib.Path] = {}
+    argv = sys.argv
+
+    if len(argv) < 3:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    # Package name first
+    package_name = argv[1]
+
+    # Output file map: input output
+    idx = 2
+    while idx < len(argv):
+        if argv[idx] == "--":
+            idx += 1
+            break
+
+        generated_pyi[pathlib.PurePosixPath(argv[idx])] = pathlib.Path(argv[idx + 1])
+        idx += 2
+
+    # Arguments are used to set up the package map
+    package_map = _PackageFinder.mapping
+    for i in range(idx, len(argv), 2):
+        # python 3.9 requires paths to be resolved
+        package_map[argv[i]] = os.fspath(pathlib.Path(argv[i + 1]).resolve())
+
+    # Add parent packages too
+    # .. assuming there are __init__.py in each package
+    for pkg in list(package_map.keys()):
+        while True:
+            idx = pkg.rfind(".")
+            if idx == -1:
+                break
+            ppkg = pkg[:idx]
+            if ppkg not in package_map:
+                package_map[ppkg] = join(
+                    dirname(dirname(package_map[pkg])), "__init__.py"
+                )
+            pkg = ppkg
+
+    import pprint
+
+    pprint.pprint(package_map, sys.stderr)
+    pprint.pprint(generated_pyi, sys.stderr)
+    print("*" * 80, file=sys.stderr)
+
+    sys.meta_path.insert(0, _PackageFinder)
+
+    _write_pyi(package_name, generated_pyi)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_publish_casters.py b/src/semiwrap/cmd_publish_casters.py
new file mode 100644
index 00000000..07028c01
--- /dev/null
+++ b/src/semiwrap/cmd_publish_casters.py
@@ -0,0 +1,91 @@
+"""
+Determines which casters are in the pyproject.toml and publishes them so they
+can be consumed and used by other wheels.
+
+Generates a FOO.pc file and a FOO.pybind11.json file
+"""
+
+import pathlib
+import sys
+
+from .casters import TypeCasterJsonData, TypeCasterJsonHeader, save_typecaster_json_data
+from .mkpc import make_pc_file
+from .pyproject import PyProject
+
+
+def main():
+    try:
+        _, pyproject_toml, caster_name, output_json, output_pc = sys.argv
+    except ValueError:
+        print(__doc__, file=sys.stderr)
+        sys.exit(1)
+
+    project = PyProject(pathlib.Path(pyproject_toml))
+    cfg = project.project.export_type_casters[caster_name]
+
+    # make sure the include directories actually exist
+    include_dirs = []
+    for inc in cfg.includedir:
+        includedir = project.root / pathlib.Path(inc)
+        include_dirs.append(includedir)
+        if not includedir.exists():
+            print(f"ERROR: {includedir} does not exist", file=sys.stderr)
+            print(
+                f"- specified at [tool.semiwrap.export_type_casters.{caster_name}].includedir",
+                file=sys.stderr,
+            )
+            sys.exit(1)
+
+    pc_install_path = project.package_root / pathlib.Path(*cfg.pypackage.split("."))
+    make_pc_file(
+        project_root=project.root,
+        pcfile=pathlib.Path(output_pc),
+        pc_install_path=pc_install_path,
+        name=caster_name,
+        desc="pybind11 type casters",
+        version="",
+        includes=cfg.includedir,
+        depends=cfg.requires,
+        libinit_py=None,
+        generator_name="semiwrap.cmd_publish_casters",
+    )
+
+    #
+    # Gather the data and write it next to the pc file
+    #
+
+    data = TypeCasterJsonData()
+    for hdr in cfg.headers:
+
+        # Ensure the published header actually exists
+        searched = []
+        for inc in include_dirs:
+            full_hdr = inc / hdr.header
+            if full_hdr.exists():
+                break
+
+            searched.append(full_hdr)
+        else:
+
+            print(f"ERROR: {hdr.header} does not exist", file=sys.stderr)
+            print(
+                f"- specified at [[tool.semiwrap.export_type_casters.{caster_name}.headers]].header",
+                file=sys.stderr,
+            )
+            for s in searched:
+                print(f"- searched '{s}'", file=sys.stderr)
+            sys.exit(1)
+
+        data.headers.append(
+            TypeCasterJsonHeader(
+                header=hdr.header,
+                types=hdr.types,
+                default_arg_cast=hdr.default_arg_cast,
+            )
+        )
+
+    save_typecaster_json_data(pathlib.Path(output_json), data)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/cmd_resolve_casters.py b/src/semiwrap/cmd_resolve_casters.py
new file mode 100644
index 00000000..a55d366a
--- /dev/null
+++ b/src/semiwrap/cmd_resolve_casters.py
@@ -0,0 +1,71 @@
+"""
+Creates a per-module data file mapping type names to header files containing
+pybind11 type caster implementations.
+"""
+
+import inspect
+import pathlib
+import pickle
+import sys
+
+from .casters import CastersData, load_typecaster_json_data, TypeData
+from .depfile import Depfile
+
+
+def _update_all_casters(type_caster_cfg: pathlib.Path, all_casters: CastersData):
+    data = load_typecaster_json_data(type_caster_cfg)
+
+    # flatten it
+    for item in data.headers:
+        header = pathlib.Path(item.header)
+        if header.is_absolute():
+            raise ValueError(
+                f"{type_caster_cfg} contains absolute path to header: {header}"
+            )
+
+        for typ in item.types:
+            td = TypeData(
+                header=header, typename=typ, default_arg_cast=item.default_arg_cast
+            )
+
+            if typ not in all_casters:
+                all_casters[typ] = td
+
+            # in addition to the type, add a non-namespaced version too
+            # - in theory this could cause conflicts, but in practice it's useful
+            # - this could be solved by making the parser resolve namespaces, but
+            #   that has downsides too
+            ntyp = typ.split("::")[-1]
+            if ntyp not in all_casters:
+                all_casters[ntyp] = td
+
+
+def main():
+    try:
+        _, outfile_arg, depfile_arg = sys.argv[:3]
+        caster_json_files = sys.argv[3:]
+    except ValueError:
+        print(inspect.cleandoc(__doc__ or ""), file=sys.stderr)
+        sys.exit(1)
+
+    outfile = pathlib.Path(outfile_arg)
+    depfile = pathlib.Path(depfile_arg)
+
+    d = Depfile(outfile)
+    content: CastersData = {}
+
+    for f in caster_json_files:
+        path = pathlib.Path(f)
+        d.add(path)
+        _update_all_casters(path, content)
+
+    # write the depfile
+    d.write(depfile)
+
+    # write the pickled data
+    with open(outfile, "wb") as fp:
+        pickle.dump(content, fp)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/robotpy_build/command/update_init.py b/src/semiwrap/cmd_update_init.py
similarity index 100%
rename from robotpy_build/command/update_init.py
rename to src/semiwrap/cmd_update_init.py
diff --git a/robotpy_build/command/__init__.py b/src/semiwrap/config/__init__.py
similarity index 100%
rename from robotpy_build/command/__init__.py
rename to src/semiwrap/config/__init__.py
diff --git a/robotpy_build/config/autowrap_yml.py b/src/semiwrap/config/autowrap_yml.py
similarity index 80%
rename from robotpy_build/config/autowrap_yml.py
rename to src/semiwrap/config/autowrap_yml.py
index 24e6c6df..27414a7d 100644
--- a/robotpy_build/config/autowrap_yml.py
+++ b/src/semiwrap/config/autowrap_yml.py
@@ -3,15 +3,17 @@
 # to modify the generated files
 #
 
+import dataclasses
 import enum
-from typing import Dict, List, Tuple, Optional
+from typing import Dict, List, Tuple, Optional, Union
 
-from pydantic import validator, Field
-from .util import Model, _generating_documentation
 import yaml
 
+from .util import fix_yaml_dict, parse_input
 
-class ParamData(Model):
+
+@dataclasses.dataclass(frozen=True)
+class ParamData:
     """Various ways to modify parameters"""
 
     #: Set parameter name to this
@@ -58,7 +60,8 @@ class BufferType(str, enum.Enum):
     INOUT = "inout"
 
 
-class BufferData(Model):
+@dataclasses.dataclass(frozen=True)
+class BufferData:
     #: Indicates what type of python buffer is required
     type: BufferType
 
@@ -80,34 +83,19 @@ class ReturnValuePolicy(enum.Enum):
     for what each of these values mean.
     """
 
-    TAKE_OWNERSHIP = "take_ownership"
-    COPY = "copy"
-    MOVE = "move"
-    REFERENCE = "reference"
-    REFERENCE_INTERNAL = "reference_internal"
-    AUTOMATIC = "automatic"
-    AUTOMATIC_REFERENCE = "automatic_reference"
+    take_ownership = "take_ownership"
+    copy = "copy"
+    move = "move"
+    reference = "reference"
+    reference_internal = "reference_internal"
+    automatic = "automatic"
+    automatic_reference = "automatic_reference"
 
 
-class FunctionData(Model):
+@dataclasses.dataclass(frozen=True)
+class OverloadData:
     """
-    Customize the way the autogenerator binds a function.
-
-    .. code-block:: yaml
-
-       functions:
-         # for non-overloaded functions, just specify the name + customizations
-         name_of_non_overloaded_fn:
-           # add customizations for function here
-
-         # For overloaded functions, specify the name, but each overload
-         # separately
-         my_overloaded_fn:
-           overloads:
-             int, int:
-               # customizations for `my_overloaded_fn(int, int)`
-             int, int, int:
-               # customizations for `my_overloaded_fn(int, int, int)`
+    .. seealso:: :class:`.FunctionData`
     """
 
     #: If True, don't wrap this
@@ -150,18 +138,16 @@ class FunctionData(Model):
     rename: Optional[str] = None
 
     #: Mechanism to override individual parameters
-    param_override: Dict[str, ParamData] = {}
+    param_override: Dict[str, ParamData] = dataclasses.field(default_factory=dict)
 
     #: If specified, put the function in a sub.pack.age
     subpackage: Optional[str] = None
 
-    #: By default, robotpy-build will release the GIL whenever a wrapped
+    #: By default, semiwrap will release the GIL whenever a wrapped
     #: function is called.
     no_release_gil: Optional[bool] = None
 
-    buffers: List[BufferData] = []
-
-    overloads: Dict[str, "FunctionData"] = {}
+    buffers: List[BufferData] = dataclasses.field(default_factory=list)
 
     #: Adds py::keep_alive<x,y> to the function. Overrides automatic
     #: keepalive support, which retains references passed to constructors.
@@ -169,7 +155,7 @@ class FunctionData(Model):
     keepalive: Optional[List[Tuple[int, int]]] = None
 
     #: https://pybind11.readthedocs.io/en/stable/advanced/functions.html#return-value-policies
-    return_value_policy: ReturnValuePolicy = ReturnValuePolicy.AUTOMATIC
+    return_value_policy: ReturnValuePolicy = ReturnValuePolicy.automatic
 
     #: If this is a function template, this is a list of instantiations
     #: that you wish to provide. This is a list of lists, where the inner
@@ -203,24 +189,30 @@ class FunctionData(Model):
     #:
     virtual_xform: Optional[str] = None
 
-    @validator("overloads", pre=True)
-    def validate_overloads(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = FunctionData()
-        return value
 
-    @validator("virtual_xform")
-    def validate_virtual_xform(cls, v, values):
-        if v and values.get("trampoline_cpp_code"):
-            raise ValueError(
-                "cannot specify trampoline_cpp_code and virtual_xform for the same method"
-            )
-        return v
+@dataclasses.dataclass(frozen=True)
+class FunctionData(OverloadData):
+    """
+    Customize the way the autogenerator binds a function.
+
+    .. code-block:: yaml
+
+       functions:
+         # for non-overloaded functions, just specify the name + customizations
+         name_of_non_overloaded_fn:
+           # add customizations for function here
 
+         # For overloaded functions, specify the name, but each overload
+         # separately
+         my_overloaded_fn:
+           overloads:
+             int, int:
+               # customizations for `my_overloaded_fn(int, int)`
+             int, int, int:
+               # customizations for `my_overloaded_fn(int, int, int)`
+    """
 
-if not _generating_documentation:
-    FunctionData.update_forward_refs()
+    overloads: Dict[str, OverloadData] = dataclasses.field(default_factory=dict)
 
 
 class PropAccess(enum.Enum):
@@ -231,18 +223,19 @@ class PropAccess(enum.Enum):
     #: * If a struct/union, default to readwrite
     #: * If a class, default to readwrite if a basic type that isn't a
     #:   reference, otherwise default to readonly
-    AUTOMATIC = "auto"
+    auto = "auto"
 
     #: Allow python users access to the value, but ensure it can't
     #: change. This is useful for properties that are defined directly
     #: in the class
-    READONLY = "readonly"
+    readonly = "readonly"
 
     #: Allows python users to read/write the value
-    READWRITE = "readwrite"
+    readwrite = "readwrite"
 
 
-class PropData(Model):
+@dataclasses.dataclass(frozen=True)
+class PropData:
     #: If set to True, this property is not made available to python
     ignore: bool = False
 
@@ -250,7 +243,7 @@ class PropData(Model):
     rename: Optional[str] = None
 
     #: Python code access to this property
-    access: PropAccess = PropAccess.AUTOMATIC
+    access: PropAccess = PropAccess.auto
 
     #: Docstring for the property (only available on class properties)
     doc: Optional[str] = None
@@ -259,7 +252,8 @@ class PropData(Model):
     doc_append: Optional[str] = None
 
 
-class EnumValue(Model):
+@dataclasses.dataclass(frozen=True)
+class EnumValue:
     #: If set to True, this property is not made available to python
     ignore: bool = False
 
@@ -273,7 +267,8 @@ class EnumValue(Model):
     doc_append: Optional[str] = None
 
 
-class EnumData(Model):
+@dataclasses.dataclass(frozen=True)
+class EnumData:
     #: Set your own docstring for the enum
     doc: Optional[str] = None
 
@@ -293,7 +288,7 @@ class EnumData(Model):
     #: enums that are part of classes)
     subpackage: Optional[str] = None
 
-    values: Dict[str, EnumValue] = {}
+    values: Dict[str, EnumValue] = dataclasses.field(default_factory=dict)
 
     #: This will insert code right before the semicolon ending the enum py
     #: definition. You can use this to easily insert additional custom values
@@ -306,7 +301,8 @@ class EnumData(Model):
     arithmetic: bool = False
 
 
-class ClassData(Model):
+@dataclasses.dataclass(frozen=True)
+class ClassData:
     #: Docstring for the class
     doc: Optional[str] = None
 
@@ -316,16 +312,16 @@ class ClassData(Model):
     ignore: bool = False
 
     #: List of bases to ignore. Name must include any template specializations.
-    ignored_bases: List[str] = []
+    ignored_bases: List[str] = dataclasses.field(default_factory=list)
 
     #: Specify fully qualified names for the bases. If the base has a template
     #: parameter, you must include it. Only needed if it can't be automatically
     #: detected directly from the text.
-    base_qualnames: Dict[str, str] = {}
+    base_qualnames: Dict[str, str] = dataclasses.field(default_factory=dict)
 
-    attributes: Dict[str, PropData] = {}
-    enums: Dict[str, EnumData] = {}
-    methods: Dict[str, FunctionData] = {}
+    attributes: Dict[str, PropData] = dataclasses.field(default_factory=dict)
+    enums: Dict[str, EnumData] = dataclasses.field(default_factory=dict)
+    methods: Dict[str, FunctionData] = dataclasses.field(default_factory=dict)
 
     is_polymorphic: Optional[bool] = None
     force_no_trampoline: bool = False
@@ -340,13 +336,13 @@ class ClassData(Model):
 
     #: If there are circular dependencies, this will help you resolve them
     #: manually. TODO: make it so we don't need this
-    force_depends: List[str] = []
+    force_depends: List[str] = dataclasses.field(default_factory=list)
 
     #: Use this to bring in type casters for a particular type that may have
     #: been hidden (for example, with a typedef or definition in another file),
     #: instead of explicitly including the header. This should be the full
     #: namespace of the type.
-    force_type_casters: List[str] = []
+    force_type_casters: List[str] = dataclasses.field(default_factory=list)
 
     #: If the object shouldn't be deleted by pybind11, use this. Disables
     #: implicit constructors.
@@ -366,11 +362,11 @@ class ClassData(Model):
 
     #: Extra 'using' directives to insert into the trampoline and the
     #: wrapping scope
-    typealias: List[str] = []
+    typealias: List[str] = dataclasses.field(default_factory=list)
 
     #: Fully-qualified pre-existing constant that will be inserted into the
     #: trampoline and wrapping scopes as a constexpr
-    constants: List[str] = []
+    constants: List[str] = dataclasses.field(default_factory=list)
 
     #: If this is a template class, a list of the parameters if it can't
     #: be autodetected (currently can't autodetect). If there is no space
@@ -400,29 +396,9 @@ class ClassData(Model):
     #:
     inline_code: Optional[str] = None
 
-    @validator("attributes", pre=True)
-    def validate_attributes(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = PropData()
-        return value
-
-    @validator("enums", pre=True)
-    def validate_enums(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = EnumData()
-        return value
-
-    @validator("methods", pre=True)
-    def validate_methods(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = FunctionData()
-        return value
-
-
-class TemplateData(Model):
+
+@dataclasses.dataclass(frozen=True)
+class TemplateData:
     """
     Instantiates a template as a python type. To customize the class,
     add it to the ``classes`` key and specify the template type.
@@ -455,7 +431,7 @@ class MyClass {};
     qualname: str
 
     #: Template parameters to use
-    params: List[str]
+    params: List[Union[str, int]]
 
     #: If specified, put the template instantiation in a sub.pack.age
     subpackage: Optional[str] = None
@@ -467,7 +443,8 @@ class MyClass {};
     doc_append: Optional[str] = None
 
 
-class Defaults(Model):
+@dataclasses.dataclass(frozen=True)
+class Defaults:
     """
     Defaults to apply to everything
     """
@@ -480,24 +457,25 @@ class Defaults(Model):
     report_ignored_missing: bool = True
 
 
-class AutowrapConfigYaml(Model):
+@dataclasses.dataclass(frozen=True)
+class AutowrapConfigYaml:
     """
     Format of the file in [tool.robotpy-build.wrappers."PACKAGENAME"]
     generation_data
     """
 
-    defaults: Defaults = Field(default_factory=Defaults)
+    defaults: Defaults = dataclasses.field(default_factory=Defaults)
 
-    strip_prefixes: List[str] = []
+    strip_prefixes: List[str] = dataclasses.field(default_factory=list)
 
     #: Adds ``#include <FILENAME>`` directives to the top of the autogenerated
     #: C++ file, after autodetected include dependencies are inserted.
-    extra_includes: List[str] = []
+    extra_includes: List[str] = dataclasses.field(default_factory=list)
 
     #: Adds ``#include <FILENAME>`` directives after robotpy_build.h is
     #: included, but before any autodetected include dependencies. Only use
     #: this when dealing with broken headers.
-    extra_includes_first: List[str] = []
+    extra_includes_first: List[str] = dataclasses.field(default_factory=list)
 
     #: Specify raw C++ code that will be inserted at the end of the
     #: autogenerated file, inside a function. This is useful for extending
@@ -530,9 +508,9 @@ class AutowrapConfigYaml(Model):
     #:      my_variable:
     #:        # customizations here, see PropData
     #:
-    attributes: Dict[str, PropData] = {}
+    attributes: Dict[str, PropData] = dataclasses.field(default_factory=dict)
 
-    #: Key is the class name
+    #: Key is the namespace + class name
     #:
     #: .. code-block:: yaml
     #:
@@ -540,7 +518,7 @@ class AutowrapConfigYaml(Model):
     #:      CLASSNAME:
     #:        # customizations here, see ClassData
     #:
-    classes: Dict[str, ClassData] = {}
+    classes: Dict[str, ClassData] = dataclasses.field(default_factory=dict)
 
     #: Key is the function name
     #:
@@ -550,7 +528,7 @@ class AutowrapConfigYaml(Model):
     #:      fn_name:
     #:        # customizations here, see FunctionData
     #:
-    functions: Dict[str, FunctionData] = {}
+    functions: Dict[str, FunctionData] = dataclasses.field(default_factory=dict)
 
     #: Key is the enum name, for enums at global scope
     #:
@@ -560,7 +538,7 @@ class AutowrapConfigYaml(Model):
     #:      MyEnum:
     #:        # customizations here, see EnumData
     #:
-    enums: Dict[str, EnumData] = {}
+    enums: Dict[str, EnumData] = dataclasses.field(default_factory=dict)
 
     #: Instantiates a template. Key is the name to give to the Python type.
     #:
@@ -570,43 +548,15 @@ class AutowrapConfigYaml(Model):
     #:      ClassName:
     #:        # customizations here, see TemplateData
     #:
-    templates: Dict[str, TemplateData] = {}
+    templates: Dict[str, TemplateData] = dataclasses.field(default_factory=dict)
 
     #: Extra 'using' directives to insert into the trampoline and the
     #: wrapping scope
-    typealias: List[str] = []
+    typealias: List[str] = dataclasses.field(default_factory=list)
 
     #: Encoding to use when opening this header file
     encoding: str = "utf-8-sig"
 
-    @validator("attributes", pre=True)
-    def validate_attributes(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = PropData()
-        return value
-
-    @validator("classes", pre=True)
-    def validate_classes(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = ClassData()
-        return value
-
-    @validator("enums", pre=True)
-    def validate_enums(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = EnumData()
-        return value
-
-    @validator("functions", pre=True)
-    def validate_functions(cls, value):
-        for k, v in value.items():
-            if v is None:
-                value[k] = FunctionData()
-        return value
-
     @classmethod
     def from_file(cls, fname) -> "AutowrapConfigYaml":
         with open(fname) as fp:
@@ -615,4 +565,6 @@ def from_file(cls, fname) -> "AutowrapConfigYaml":
         if data is None:
             data = {}
 
-        return cls(**data)
+        data = fix_yaml_dict(data)
+
+        return parse_input(data, cls, fname)
diff --git a/src/semiwrap/config/pyproject_toml.py b/src/semiwrap/config/pyproject_toml.py
new file mode 100644
index 00000000..eb1ee981
--- /dev/null
+++ b/src/semiwrap/config/pyproject_toml.py
@@ -0,0 +1,198 @@
+#
+# pyproject.toml
+#
+
+import dataclasses
+import re
+from typing import Dict, List, Optional
+
+_arch_re = re.compile(r"\{\{\s*ARCH\s*\}\}")
+_os_re = re.compile(r"\{\{\s*OS\s*\}\}")
+
+
+@dataclasses.dataclass
+class TypeCasterHeader:
+
+    #: Header file to include when one of the types are detected in a wrapper
+    header: str
+
+    #: Types to look for to indicate that this type caster header should be
+    #: included.
+    types: List[str]
+
+    #: If a parameter whose type requires this type caster has a default
+    #: argument, a C-style ``(type)`` cast is used on the default argument.
+    #:
+    #: The default cast can be disabled via param_override's ``disable_type_caster_default_cast``
+    default_arg_cast: bool = False
+
+
+@dataclasses.dataclass
+class TypeCasterConfig:
+    """
+    Specifies type casters to be exported as a pkgconf package. When a semiwrap package has a
+    dependency on such a package, it will use this type information at generation time and
+    include the type caster headers in generated wrappers.
+
+    .. code-block:: toml
+
+       [tool.semiwrap.export_type_casters.NAME]
+       pypackage = "mypkg"
+       includedir = ["src/mypkg/type-casters"]
+
+       [[tool.semiwrap.export_type_casters.NAME.headers]]
+       header = "my_type_caster.h"
+       types = ["foo_t", "ns::ins::bar_t"]
+
+    .. seealso:: :ref:`type_casters`
+    """
+
+    #: Python package to write the .pc and .json file to
+    pypackage: str
+
+    #: Location of type caster header files, relative to the root of the project
+    includedir: List[str]
+
+    #: pkgconf 'requires'
+    requires: List[str] = dataclasses.field(default_factory=list)
+
+    #: Contains header/type mappings for type casters
+    headers: List[TypeCasterHeader] = dataclasses.field(default_factory=list)
+
+
+@dataclasses.dataclass
+class ExtensionModuleConfig:
+    """
+    Configuration for generating pybind11 wrappers around specified header files.
+
+    .. code-block:: toml
+
+        [tool.semiwrap.extension_modules."PACKAGE.NAME"]
+
+    """
+
+    #: The name used internally to refer to this extension module. The name is
+    #: used as the prefix of variable names in build files.
+    #:
+    #: If not specified, the default name is the package name with invalid
+    #: characters replaced by underscores.
+    name: Optional[str] = None
+
+    #: Name of generated file that ensures the shared libraries and any
+    #: dependencies are loaded. Defaults to ``_init_XXX.py``, where XXX
+    #: is the last element of the package name
+    #:
+    #: Generally, you should create an ``__init__.py`` file that imports
+    #: this module, otherwise your users will need to do so.
+    libinit: Optional[str] = None
+
+    #: Name of pkgconf libraries that this is wrapping. This is also added to
+    #: depends list.
+    wraps: List[str] = dataclasses.field(default_factory=list)
+
+    #: List of pkgconf library dependencies. This affects this wrapper
+    #: library in the following ways:
+    #:
+    #: * Any include file directories exported by the dependency will be added
+    #:   to the include path for any source files compiled by this wrapper
+    #: * It will be linked to any libraries the dependency contains
+    #: * The python module for the dependency will be imported in the
+    #:   ``_init{extension}.py`` file.
+    depends: List[str] = dataclasses.field(default_factory=list)
+
+    #: List of include directories for this module relative to the project root
+    includes: List[str] = dataclasses.field(default_factory=list)
+
+    #: Specifies header files that autogenerated pybind11 wrappers will be
+    #: created for. Simple C++ headers will most likely 'just work', but
+    #: complex headers will need to have an accompanying :attr:`generation_data`
+    #: file specified that can customize the autogenerated files.
+    #:
+    #: List of dictionaries: each dictionary key is used for the function
+    #: name of the initialization function, the value is the header that is
+    #: being wrapped. The header is searched for in the following order:
+    #:
+    #: - Relative to include directories specified by 'wraps' dependencies
+    #: - Relative to the extra include directories for this module
+    #: - Relative to the package directory
+    #:
+    #: .. code-block:: toml
+    #:
+    #:    [tool.semiwrap.extension_modules."PACKAGE.NAME".headers]
+    #:    Name = "header.h"
+    #:
+    #: .. seealso:: :ref:`autowrap`
+    #:
+    headers: Dict[str, str] = dataclasses.field(default_factory=dict)
+
+    #: Path to a directory of yaml files. Generation data will be looked up
+    #: using the key in the headers dictionary.
+    #:
+    #: These YAML files can be generated via the semiwrap command line tool:
+    #:
+    #: .. code-block:: sh
+    #:
+    #:    semiwrap create-gen --write
+    #:
+    #: .. seealso:: :ref:`gendata`
+    #:
+    yaml_path: Optional[str] = None
+
+    #: Preprocessor definitions to apply when compiling this wrapper.
+    # pp_defines: List[str] = dataclasses.field(default_factory=list)
+
+    #: If True, skip this wrapper; typically used in conjunction with an override.
+    ignore: bool = False
+
+
+@dataclasses.dataclass
+class SemiwrapToolConfig:
+    """
+    Contains information for configuring the project
+
+    .. code-block:: toml
+
+       [tool.semiwrap]
+
+    .. note:: This section is required
+    """
+
+    #: List of headers for the scan-headers tool to ignore
+    scan_headers_ignore: List[str] = dataclasses.field(default_factory=list)
+
+    #: List of python packages with __init__.py to update when ``python setup.py update_init``
+    #: is called -- this is an argument to the ``semiwrap create-imports`` command, and
+    #: may contain a space and the second argument to create-imports.
+    # update_init: List[str] = []
+
+    #: Modules to autogenerate. The key is the name of the python package that will
+    #: be generated. For example, "package.name" will cause the shared library
+    #: 'package/name-{abi tag}.so' to be installed
+    extension_modules: Dict[str, ExtensionModuleConfig] = dataclasses.field(
+        default_factory=dict
+    )
+
+    #: Type casters to export. The key is the pkgconf package name.
+    export_type_casters: Dict[str, TypeCasterConfig] = dataclasses.field(
+        default_factory=dict
+    )
+
+
+@dataclasses.dataclass
+class SemiwrapHatchlingConfig:
+    """
+    semiwrap hatchling hook configuration
+
+    .. code-block:: toml
+
+       [tool.hatch.build.hooks.semiwrap]
+
+    .. note:: This section is required
+    """
+
+    #: Output directory to write meson.build to
+    autogen_build_path: str = "wrapcfg"
+
+    #: Output directory to write second stage meson.build to (defaults
+    #: to autogen_build_path / 'modules')
+    module_build_path: Optional[str] = None
diff --git a/src/semiwrap/config/util.py b/src/semiwrap/config/util.py
new file mode 100644
index 00000000..fc5879f0
--- /dev/null
+++ b/src/semiwrap/config/util.py
@@ -0,0 +1,57 @@
+import typing
+
+from validobj import errors
+import validobj.validation
+
+T = typing.TypeVar("T")
+
+
+class ValidationError(Exception):
+    pass
+
+
+def _convert_validation_error(fname, ve: errors.ValidationError) -> ValidationError:
+    locs = []
+    msg = []
+
+    e = ve
+    while e is not None:
+
+        if isinstance(e, errors.WrongFieldError):
+            locs.append(f".{e.wrong_field}")
+        elif isinstance(e, errors.WrongListItemError):
+            locs.append(f"[{e.wrong_index}]")
+        else:
+            msg.append(str(e))
+
+        e = e.__cause__
+
+    loc = "".join(locs)
+    if loc.startswith("."):
+        loc = loc[1:]
+    msg = "\n  ".join(msg)
+    vmsg = f"{fname}: {loc}:\n  {msg}"
+    return ValidationError(vmsg)
+
+
+def parse_input(value: typing.Any, spec: typing.Type[T], fname) -> T:
+    try:
+        return validobj.validation.parse_input(value, spec)
+    except errors.ValidationError as ve:
+        raise _convert_validation_error(fname, ve) from None
+
+
+# yaml converts empty values to None, but we never want that
+def fix_yaml_dict(a: typing.Any):
+    if isinstance(a, dict):
+        for k, v in a.items():
+            if v is None:
+                a[k] = {}
+            if isinstance(v, dict):
+                fix_yaml_dict(v)
+    elif isinstance(a, list):
+        for v in a:
+            if isinstance(v, dict):
+                fix_yaml_dict(v)
+
+    return a
diff --git a/src/semiwrap/depfile.py b/src/semiwrap/depfile.py
new file mode 100644
index 00000000..20c79fcb
--- /dev/null
+++ b/src/semiwrap/depfile.py
@@ -0,0 +1,32 @@
+import dataclasses
+import os
+import pathlib
+import typing as T
+
+
+def _escape_dep(dep: str):
+    dep = dep.replace("\\", "\\\\")
+    dep = dep.replace(" ", "\\ ")
+    return dep
+
+
+@dataclasses.dataclass
+class Depfile:
+    # TODO: currently only supports single output target
+
+    target: pathlib.Path
+    deps: T.List[pathlib.Path] = dataclasses.field(default_factory=list)
+
+    def add(self, dep: pathlib.Path):
+        self.deps.append(dep)
+
+    def write(self, path: pathlib.Path):
+        """
+        Write make-compatible depfile
+        """
+        with open(path, "w") as fp:
+            target = _escape_dep(str(self.target.absolute()))
+            fp.write(f"{target}:")
+            for dep in self.deps:
+                fp.write(f" \\\n  {_escape_dep(str(dep.absolute()))}")
+            fp.write("\n")
diff --git a/src/semiwrap/hooks.py b/src/semiwrap/hooks.py
new file mode 100644
index 00000000..9d6cd012
--- /dev/null
+++ b/src/semiwrap/hooks.py
@@ -0,0 +1,108 @@
+# TODO: rename this
+
+import os
+import pathlib
+import sys
+import traceback
+import typing as T
+
+from hatchling.plugin import hookimpl
+from hatchling.builders.hooks.plugin.interface import BuildHookInterface
+
+from .cmd_genmeson import gen_meson_to_file
+
+from .config.pyproject_toml import SemiwrapHatchlingConfig
+from .config.util import parse_input
+
+
+@hookimpl
+def hatch_register_build_hook():
+    return SemiwrapBuildHook
+
+
+class SemiwrapBuildHook(BuildHookInterface):
+    """
+    Sets up code generation to be ran by meson
+    """
+
+    PLUGIN_NAME = "semiwrap"
+
+    def initialize(self, version: str, build_data: T.Dict[str, T.Any]) -> None:
+        # Only needed for building wheels
+        if self.target_name != "wheel":
+            return
+
+        project_root = pathlib.Path(self.root).resolve()
+
+        config = parse_input(
+            self.config, SemiwrapHatchlingConfig, "[tool.hatch.build.hooks.semiwrap]"
+        )
+        stage0_build_path = project_root / config.autogen_build_path
+        stage0_meson_build = stage0_build_path / "meson.build"
+        stage0_gitignore = stage0_build_path / ".gitignore"
+
+        if config.module_build_path is not None:
+            stage1_build_path = project_root / config.module_build_path
+        else:
+            stage1_build_path = stage0_build_path / "modules"
+
+        stage1_meson_build = stage1_build_path / "meson.build"
+
+        # This is used to generate files installed into a directory called 'trampolines'
+        # so due to https://github.com/mesonbuild/meson/issues/2320 this must also be
+        # in a directory called 'trampolines'
+        # - unlike stage0 and stage1, this is included by stage0
+        trampoline_build_path = stage0_build_path / "trampolines"
+        trampoline_meson_build = trampoline_build_path / "meson.build"
+
+        try:
+            eps = gen_meson_to_file(
+                project_root,
+                stage0_meson_build,
+                stage1_meson_build,
+                trampoline_meson_build,
+            )
+        except Exception as e:
+            # Reading the stack trace is annoying; most of the time the exception content
+            # is enough to figure out what you did wrong.
+            if os.environ.get("SEMIWRAP_ERROR_VERBOSE") == "1":
+                raise
+
+            msg = [
+                "ERROR: exception occurred when processing `pyproject.toml`\n\n",
+            ]
+
+            msg += traceback.format_exception_only(type(e), e)
+            cause = e.__context__
+            while cause is not None:
+                if "prepare_metadata_for_build_editable" in str(cause):
+                    break
+
+                el = traceback.format_exception_only(type(cause), cause)
+                el[0] = f"- caused by {el[0]}"
+                msg += el
+
+                if cause.__suppress_context__:
+                    break
+
+                cause = cause.__context__
+
+            msg.append(
+                "\nSet environment variable SEMIWRAP_ERROR_VERBOSE=1 for stacktrace"
+            )
+
+            print("".join(msg), file=sys.stderr)
+            sys.exit(1)
+
+        if eps:
+            # .. not documented but it works?
+            for ep in eps:
+                g = self.metadata.core.entry_points.setdefault(ep.group, {})
+                g[ep.name] = ep.package
+
+        if not stage0_gitignore.exists():
+            stage0_gitignore.write_text(
+                "/meson.build\n"
+                f"/{stage1_build_path.name}/meson.build\n"
+                f"/{trampoline_build_path.name}/meson.build\n"
+            )
diff --git a/robotpy_build/include/gilsafe_object.h b/src/semiwrap/include/gilsafe_object.h
similarity index 100%
rename from robotpy_build/include/gilsafe_object.h
rename to src/semiwrap/include/gilsafe_object.h
diff --git a/robotpy_build/include/pybind11_typing.h b/src/semiwrap/include/pybind11_typing.h
similarity index 100%
rename from robotpy_build/include/pybind11_typing.h
rename to src/semiwrap/include/pybind11_typing.h
diff --git a/robotpy_build/include/robotpy_build.h b/src/semiwrap/include/robotpy_build.h
similarity index 97%
rename from robotpy_build/include/robotpy_build.h
rename to src/semiwrap/include/robotpy_build.h
index b585fb96..6e79712f 100644
--- a/robotpy_build/include/robotpy_build.h
+++ b/src/semiwrap/include/robotpy_build.h
@@ -26,8 +26,6 @@ using py::raise_from;
     "overrides for that method. It is likely the following compiler error " \
     "messages will tell you which one it is."
 
-// Use this to define your module instead of PYBIND11_MODULE
-#define RPYBUILD_PYBIND11_MODULE(variable) PYBIND11_MODULE(RPYBUILD_MODULE_NAME, variable)
 
 // only for use by RPYBUILD_OVERRIDE_PURE_POST_IMPL
 template <class T> py::handle __get_handle(const T *this_ptr) {
diff --git a/src/semiwrap/makeplan.py b/src/semiwrap/makeplan.py
new file mode 100644
index 00000000..8bc48afc
--- /dev/null
+++ b/src/semiwrap/makeplan.py
@@ -0,0 +1,661 @@
+from __future__ import annotations
+
+import collections
+import dataclasses
+import os
+import pathlib
+import pprint
+import sys
+import sysconfig
+import typing as T
+
+from .casters import PKGCONF_CASTER_EXT
+from .config.autowrap_yml import AutowrapConfigYaml
+from .config.pyproject_toml import ExtensionModuleConfig, TypeCasterConfig
+from .pkgconf_cache import PkgconfCache
+from .pyproject import PyProject
+from .util import relpath_walk_up
+
+import toposort
+
+
+@dataclasses.dataclass
+class Entrypoint:
+    """
+    Represents a python entrypoint that needs to be created when the wheel is installed
+
+    .. seealso:: https://packaging.python.org/en/latest/specifications/entry-points/
+    """
+
+    name: str
+    group: str
+    package: str
+
+
+@dataclasses.dataclass(frozen=True)
+class CppMacroValue:
+    """
+    Represents the value of a macro defined by the C++ compiler
+    """
+
+    name: str
+
+
+@dataclasses.dataclass(frozen=True)
+class LocalDependency:
+    name: str
+    include_paths: T.Tuple[pathlib.Path, ...]
+    depends: T.Tuple[T.Union[LocalDependency, str], ...]
+
+
+@dataclasses.dataclass(frozen=True)
+class InputFile:
+    # TODO: what is this relative to?
+    path: pathlib.Path
+
+
+@dataclasses.dataclass(frozen=True)
+class OutputFile:
+    name: str
+
+
+@dataclasses.dataclass(frozen=True)
+class Depfile:
+    # A depfile is required whenever a command's outputs depend on more
+    # inputs than just its single declared input file.
+    name: str
+
+
+@dataclasses.dataclass(frozen=True)
+class BuildTargetOutput:
+    # references a specific build target output
+    target: BuildTarget
+    output_index: int
+
+
+@dataclasses.dataclass(frozen=True)
+class BuildTarget:
+    command: str
+
+    args: T.Tuple[
+        T.Union[
+            str,
+            pathlib.Path,
+            InputFile,
+            OutputFile,
+            Depfile,
+            BuildTarget,
+            BuildTargetOutput,
+            CppMacroValue,
+        ],
+        ...,
+    ]
+
+    # Install path is always relative to py.get_install_dir(pure: false)
+    install_path: T.Optional[pathlib.Path]
+
+
+@dataclasses.dataclass(frozen=True)
+class ExtensionModule:
+
+    #: variable name/prefix in the build file
+    name: str
+
+    #: full package name of installed extension
+    package_name: str
+
+    sources: T.Tuple[BuildTarget, ...]
+    depends: T.Tuple[T.Union[LocalDependency, str], ...]
+
+    # extra include directories that won't be found via depends
+    include_directories: T.Tuple[pathlib.Path, ...]
+
+    # Install path is always relative to py.get_install_dir(pure: false)
+    install_path: pathlib.Path
+
+
+class PlanError(Exception):
+    pass
+
+
+def _split_ns(name: str) -> T.Tuple[str, str]:
+    ns = ""
+    idx = name.rfind("::")
+    if idx != -1:
+        ns = name[:idx]
+        name = name[idx + 2 :]
+    return ns, name
+
+
+class _BuildPlanner:
+    def __init__(self, project_root: pathlib.Path, missing_yaml_ok: bool = False):
+
+        self.project_root = project_root
+        self.missing_yaml_ok = missing_yaml_ok
+
+        self.pyproject = PyProject(project_root / "pyproject.toml")
+        self.pkgcache = PkgconfCache()
+        self.pyproject_input = InputFile(project_root / "pyproject.toml")
+
+        self.pyi_targets: T.List[BuildTarget] = []
+        self.pyi_args = []
+
+        self.local_caster_targets: T.Dict[str, BuildTargetOutput] = {}
+        self.local_dependencies: T.Dict[str, LocalDependency] = {}
+
+        sw_path = self.pkgcache.get("semiwrap").type_casters_path
+        assert sw_path is not None
+        self.semiwrap_type_caster_path = sw_path
+
+    def generate(self):
+
+        projectcfg = self.pyproject.project
+
+        #
+        # Export type casters
+        # .. this probably should be its own hatchling plugin?
+        #
+
+        for name, caster_cfg in projectcfg.export_type_casters.items():
+            yield from self._process_export_type_caster(name, caster_cfg)
+
+        # This is needed elsewhere
+        self._cpp_macro = CppMacroValue("__cplusplus")
+        yield self._cpp_macro
+
+        #
+        # Generate extension modules
+        #
+
+        for package_name, extension in self._sorted_extension_modules():
+            try:
+                yield from self._process_extension_module(package_name, extension)
+            except Exception as e:
+                raise PlanError(f"{package_name} failed") from e
+
+        # TODO: this conditional probably should be done in the build system instead
+        # cannot build pyi files when cross-compiling
+        if not (
+            "_PYTHON_HOST_PLATFORM" in os.environ
+            or "PYTHON_CROSSENV" in os.environ
+            or os.environ.get("SEMIWRAP_SKIP_PYI") == "1"
+        ):
+            # Make a pyi for every module
+            # - they depend on every module because it needs a working environment
+            #   and the user might import something
+            # - if there's a subpkg this fails, need to think about it
+            for pyi_target in self.pyi_targets:
+                yield BuildTarget(
+                    command="make-pyi",
+                    args=pyi_target.args + tuple(self.pyi_args),
+                    install_path=pyi_target.install_path,
+                )
+
+    def _resolve_dep(self, dname: str):
+        return self.local_dependencies.get(dname, dname)
+
+    def _process_export_type_caster(self, name: str, caster_cfg: TypeCasterConfig):
+
+        # Need to generate the data file and the .pc file
+        caster_target = BuildTarget(
+            command="publish-casters",
+            args=(
+                self.pyproject_input,
+                name,
+                OutputFile(f"{name}{PKGCONF_CASTER_EXT}"),
+                OutputFile(f"{name}.pc"),
+            ),
+            install_path=pathlib.Path(*caster_cfg.pypackage.split(".")),
+        )
+        yield caster_target
+
+        # Need an entrypoint to point at the .pc file
+        yield Entrypoint(group="pkg_config", name=name, package=caster_cfg.pypackage)
+
+        dep = self.pkgcache.add_local(
+            name=name,
+            includes=[self.project_root / inc for inc in caster_cfg.includedir],
+            requires=caster_cfg.requires,
+        )
+        caster_dep = LocalDependency(
+            name=dep.name,
+            include_paths=tuple(dep.include_path),
+            depends=tuple([self._resolve_dep(cd) for cd in caster_cfg.requires]),
+        )
+        self.local_dependencies[dep.name] = caster_dep
+        yield caster_dep
+
+        # The .pc file cannot be used in the build, but the data file must be, so
+        # store it so it can be used elsewhere
+        self.local_caster_targets[name] = BuildTargetOutput(caster_target, 0)
+
+    def _sorted_extension_modules(
+        self,
+    ) -> T.Generator[T.Tuple[str, ExtensionModuleConfig], None, None]:
+        # sort extension modules by dependencies, that way modules can depend on other modules
+        # also declared in pyproject.toml without needing to worry about ordering in the file
+        by_name = {}
+        to_sort: T.Dict[str, T.Set[str]] = {}
+
+        for package_name, extension in self.pyproject.project.extension_modules.items():
+            if extension.ignore:
+                continue
+
+            name = extension.name or package_name.replace(".", "_")
+            by_name[name] = (package_name, extension)
+
+            deps = to_sort.setdefault(name, set())
+            for dep in extension.wraps:
+                deps.add(dep)
+            for dep in extension.depends:
+                deps.add(dep)
+
+        for name in toposort.toposort_flatten(to_sort, sort=True):
+            data = by_name.get(name)
+            if data:
+                yield data
+
+    def _process_extension_module(
+        self, package_name: str, extension: ExtensionModuleConfig
+    ):
+        package_path_elems = package_name.split(".")
+        parent_package = ".".join(package_path_elems[:-1])
+        module_name = package_path_elems[-1]
+        package_path = pathlib.Path(*package_path_elems[:-1])
+        varname = extension.name or package_name.replace(".", "_")
+
+        # Detect the location of the package in the source tree
+        package_init_py = self.pyproject.package_root / package_path / "__init__.py"
+        self.pyi_args += [parent_package, package_init_py.as_posix()]
+
+        depends = self.pyproject.get_extension_deps(extension)
+        depends.append("semiwrap")
+
+        # Search path for wrapping is dictated by package_path and wraps
+        search_path, include_directories_uniq, caster_json_file, libinit_modules = (
+            self._prepare_dependency_paths(depends, extension)
+        )
+
+        includes = [
+            self.project_root / pathlib.PurePosixPath(inc) for inc in extension.includes
+        ]
+        search_path.extend(includes)
+
+        # Search the package path last
+        search_path.append(self.pyproject.package_root / package_path)
+
+        all_type_casters = BuildTarget(
+            command="resolve-casters",
+            args=(
+                OutputFile(f"{varname}.casters.pkl"),
+                Depfile(f"{varname}.casters.d"),
+                *caster_json_file,
+            ),
+            install_path=None,
+        )
+        yield all_type_casters
+
+        #
+        # Generate init.py for loading dependencies
+        #
+
+        libinit_module = None
+        if libinit_modules:
+            libinit_py = extension.libinit or f"_init_{module_name}.py"
+            libinit_tgt = BuildTarget(
+                command="gen-libinit-py",
+                args=(OutputFile(libinit_py), *libinit_modules),
+                install_path=package_path,
+            )
+
+            libinit_module = f"{parent_package}.{libinit_py}"[:-3]
+            self.pyi_args += [libinit_module, libinit_tgt]
+
+            yield libinit_tgt
+
+        #
+        # Publish a .pc file for this module
+        #
+
+        pc_args = [
+            package_name,
+            varname,
+            self.pyproject_input,
+            OutputFile(f"{varname}.pc"),
+        ]
+        if libinit_module:
+            pc_args += ["--libinit-py", libinit_module]
+
+        yield BuildTarget(
+            command="gen-pkgconf",
+            args=tuple(pc_args),
+            install_path=package_path,
+        )
+
+        yield Entrypoint(group="pkg_config", name=varname, package=parent_package)
+
+        #
+        # Process the headers
+        #
+
+        # Find and load the yaml
+        if extension.yaml_path is None:
+            yaml_path = pathlib.Path("wrapcfg")
+        else:
+            yaml_path = pathlib.Path(pathlib.PurePosixPath(extension.yaml_path))
+
+        datfiles, module_sources, subpackages = yield from self._process_headers(
+            extension,
+            package_path,
+            yaml_path,
+            include_directories_uniq.keys(),
+            search_path,
+            all_type_casters,
+        )
+
+        modinit = BuildTarget(
+            command="gen-modinit-hpp",
+            args=(
+                module_name,
+                OutputFile(f"semiwrap_init.{package_name}.hpp"),
+                *datfiles,
+            ),
+            install_path=None,
+        )
+        module_sources.append(modinit)
+        yield modinit
+
+        #
+        # Emit the module
+        #
+
+        # Use a local dependency to store everything so it can be referenced elsewhere
+        cached_dep = self.pkgcache.add_local(
+            name=varname,
+            includes=[*includes, self.pyproject.package_root / package_path],
+            requires=depends,
+            libinit_py=libinit_module,
+        )
+        local_dep = LocalDependency(
+            name=cached_dep.name,
+            include_paths=tuple(cached_dep.include_path),
+            depends=tuple(self._resolve_dep(dep) for dep in depends),
+        )
+        yield local_dep
+        self.local_dependencies[local_dep.name] = local_dep
+
+        modobj = ExtensionModule(
+            name=varname,
+            package_name=package_name,
+            sources=tuple(module_sources),
+            depends=(local_dep,),
+            include_directories=tuple(),
+            install_path=package_path,
+        )
+        yield modobj
+
+        self.pyi_args += [package_name, modobj]
+
+        # This is not yielded here because pyi targets need to depend on all modules
+        # via self.pyi_args.
+        # - The output .pyi files vary based on whether there are subpackages or not. If no
+        #   subpackage, it's {module_name}.pyi. If there are subpackages, it becomes
+        #   {module_name}/__init__.pyi and {module_name}/{subpackage}.pyi
+        # - As long as a user doesn't manually bind a subpackage our detection works here
+        #   but if we need to allow that then will need to declare subpackages in pyproject.toml
+        # .. this breaks if there are sub-sub packages, don't do that please
+
+        base_pyi_elems = package_name.split(".")
+
+        if subpackages:
+            pyi_elems = base_pyi_elems + ["__init__.pyi"]
+            pyi_args = [
+                pathlib.PurePath(*pyi_elems).as_posix(),
+                OutputFile("__init__.pyi"),
+            ]
+            for subpackage in subpackages:
+                pyi_elems = base_pyi_elems + [f"{subpackage}.pyi"]
+                pyi_args += [
+                    pathlib.PurePath(*pyi_elems).as_posix(),
+                    OutputFile(f"{subpackage}.pyi"),
+                ]
+
+            self.pyi_targets.append(
+                BuildTarget(
+                    command="make-pyi",
+                    args=(package_name, *pyi_args, "--"),
+                    install_path=package_path / module_name,
+                )
+            )
+
+        else:
+            base_pyi_elems[-1] = f"{base_pyi_elems[-1]}.pyi"
+
+            self.pyi_targets.append(
+                BuildTarget(
+                    command="make-pyi",
+                    args=(
+                        package_name,
+                        pathlib.PurePath(*base_pyi_elems).as_posix(),
+                        OutputFile(f"{module_name}.pyi"),
+                        "--",
+                    ),
+                    install_path=package_path,
+                )
+            )
+
+    def _locate_type_caster_json(
+        self,
+        depname: str,
+        caster_json_file: T.List[T.Union[BuildTargetOutput, pathlib.Path]],
+    ):
+        checked = set()
+        to_check = collections.deque([depname])
+        while to_check:
+            name = to_check.popleft()
+            checked.add(name)
+
+            entry = self.pkgcache.get(name)
+
+            if name in self.local_caster_targets:
+                caster_json_file.append(self.local_caster_targets[name])
+            else:
+                tc = entry.type_casters_path
+                if tc and tc not in caster_json_file:
+                    caster_json_file.append(tc)
+
+            for req in entry.requires:
+                if req not in checked:
+                    to_check.append(req)
+
+    def _prepare_dependency_paths(
+        self, depends: T.List[str], extension: ExtensionModuleConfig
+    ):
+        search_path: T.List[pathlib.Path] = []
+        include_directories_uniq: T.Dict[pathlib.Path, bool] = {}
+        caster_json_file: T.List[T.Union[BuildTargetOutput, pathlib.Path]] = []
+        libinit_modules: T.List[str] = []
+
+        # Add semiwrap default type casters
+        caster_json_file.append(self.semiwrap_type_caster_path)
+
+        for dep in depends:
+            entry = self.pkgcache.get(dep)
+            include_directories_uniq.update(
+                dict.fromkeys(entry.full_include_path, True)
+            )
+
+            # extend the search path if the dependency is in 'wraps'
+            if dep in extension.wraps:
+                search_path.extend(entry.include_path)
+
+            self._locate_type_caster_json(dep, caster_json_file)
+
+            if entry.libinit_py:
+                libinit_modules.append(entry.libinit_py)
+
+        return search_path, include_directories_uniq, caster_json_file, libinit_modules
+
+    def _process_headers(
+        self,
+        extension: ExtensionModuleConfig,
+        package_path: pathlib.Path,
+        yaml_path: pathlib.Path,
+        include_directories_uniq: T.Iterable[pathlib.Path],
+        search_path: T.List[pathlib.Path],
+        all_type_casters: BuildTarget,
+    ):
+        datfiles: T.List[BuildTarget] = []
+        module_sources: T.List[BuildTarget] = []
+        subpackages: T.Set[str] = set()
+
+        for yml, hdr in extension.headers.items():
+            yml_input = InputFile(yaml_path / f"{yml}.yml")
+
+            try:
+                ayml = AutowrapConfigYaml.from_file(self.project_root / yml_input.path)
+            except FileNotFoundError:
+                if not self.missing_yaml_ok:
+                    msg = f"{self.project_root / yml_input.path}: use `python3 -m semiwrap.cmd_creategen` to generate"
+                    raise FileNotFoundError(msg) from None
+                ayml = AutowrapConfigYaml()
+
+            # find the source header
+            h_input, h_root = self._locate_header(hdr, search_path)
+
+            header2dat_args = []
+            for inc in include_directories_uniq:
+                header2dat_args += ["-I", inc]
+
+            # https://github.com/pkgconf/pkgconf/issues/391
+            header2dat_args += ["-I", sysconfig.get_path("include")]
+
+            header2dat_args += ["--cpp", self._cpp_macro]
+
+            header2dat_args.append(yml)
+            header2dat_args.append(yml_input)
+            header2dat_args.append(h_input)
+            header2dat_args.append(h_root)
+            header2dat_args.append(all_type_casters)
+            header2dat_args.append(OutputFile(f"{yml}.dat"))
+            header2dat_args.append(Depfile(f"{yml}.d"))
+
+            datfile = BuildTarget(
+                command="header2dat", args=tuple(header2dat_args), install_path=None
+            )
+            yield datfile
+            datfiles.append(datfile)
+
+            # Every header has a .cpp file for binding
+            cppfile = BuildTarget(
+                command="dat2cpp",
+                args=(datfile, OutputFile(f"{yml}.cpp")),
+                install_path=None,
+            )
+            module_sources.append(cppfile)
+            yield cppfile
+
+            # Detect subpackages
+            for f in ayml.functions.values():
+                if f.ignore:
+                    continue
+                if f.subpackage:
+                    subpackages.add(f.subpackage)
+                for f in f.overloads.values():
+                    if f.subpackage:
+                        subpackages.add(f.subpackage)
+
+            for e in ayml.enums.values():
+                if e.ignore:
+                    continue
+                if e.subpackage:
+                    subpackages.add(e.subpackage)
+
+            # Every class gets a trampoline file, but some just have #error in them
+            for name, ctx in ayml.classes.items():
+                if ctx.ignore:
+                    continue
+
+                if ctx.subpackage:
+                    subpackages.add(ctx.subpackage)
+
+                cls_ns, cls_name = _split_ns(name)
+                cls_ns = cls_ns.replace(":", "_")
+                trampoline = BuildTarget(
+                    command="dat2trampoline",
+                    args=(datfile, name, OutputFile(f"{cls_ns}__{cls_name}.hpp")),
+                    install_path=package_path / "trampolines",
+                )
+                module_sources.append(trampoline)
+                yield trampoline
+
+            # Even more files if there are templates
+            if ayml.templates:
+
+                # Every template instantiation gets a cpp file to lessen compiler
+                # memory requirements
+                for i, (name, tctx) in enumerate(ayml.templates.items(), start=1):
+                    if tctx.subpackage:
+                        subpackages.add(tctx.subpackage)
+
+                    tmpl_cpp = BuildTarget(
+                        command="dat2tmplcpp",
+                        args=(datfile, name, OutputFile(f"{yml}_tmpl{i}.cpp")),
+                        install_path=None,
+                    )
+                    module_sources.append(tmpl_cpp)
+                    yield tmpl_cpp
+
+                # All of which use this hpp file
+                tmpl_hpp = BuildTarget(
+                    command="dat2tmplhpp",
+                    args=(datfile, OutputFile(f"{yml}_tmpl.hpp")),
+                    install_path=None,
+                )
+                module_sources.append(tmpl_hpp)
+                yield tmpl_hpp
+
+        return datfiles, module_sources, subpackages
+
+    def _locate_header(self, hdr: str, search_path: T.List[pathlib.Path]):
+        phdr = pathlib.PurePosixPath(hdr)
+        for p in search_path:
+            h_path = p / phdr
+            if h_path.exists():
+                # Ideally this would be returned as an InputFile, but inputs must be
+                # relative to the project root, which is not guaranteed on Windows.
+                # Incremental builds still work because the header appears in a depfile.
+                return h_path, p
+        raise FileNotFoundError(
+            f"cannot locate {phdr} in {', '.join(map(str, search_path))}"
+        )
+
+
+def makeplan(
+    project_root: pathlib.Path, missing_yaml_ok: bool = False
+) -> T.Generator[
+    T.Union[BuildTarget, Entrypoint, LocalDependency, ExtensionModule], None
+]:
+    """
+    Given the pyproject.toml configuration for a semiwrap project, reads the
+    configuration and generates a series of commands that can be used to parse
+    the input headers and generate the needed source code from them.
+    """
+    planner = _BuildPlanner(project_root, missing_yaml_ok)
+    yield from planner.generate()
+
+
+def main():
+    # this command only exists for debugging purposes
+    try:
+        _, project_root = sys.argv
+    except ValueError:
+        print(f"{sys.argv[0]} project_root", file=sys.stderr)
+        sys.exit(1)
+
+    output = list(makeplan(pathlib.Path(project_root)))
+    pprint.pprint(output, indent=1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/src/semiwrap/mkpc.py b/src/semiwrap/mkpc.py
new file mode 100644
index 00000000..1ef7737f
--- /dev/null
+++ b/src/semiwrap/mkpc.py
@@ -0,0 +1,52 @@
+import pathlib
+import typing as T
+
+from .pkgconf_cache import INITPY_VARNAME
+from .util import relpath_walk_up
+
+
+def make_pc_file(
+    project_root: pathlib.Path,
+    pcfile: pathlib.Path,
+    pc_install_path: pathlib.Path,
+    name: str,
+    desc: str,
+    version: str,
+    includes: T.List[str],
+    depends: T.List[str],
+    libinit_py: T.Optional[str],
+    generator_name: str,
+):
+
+    pc_content = [
+        f"# automatically generated by {generator_name}",
+        "prefix=${pcfiledir}",
+    ]
+
+    if libinit_py:
+        pc_content.append(f"{INITPY_VARNAME}={libinit_py}")
+
+    cflags = ["-I${prefix}"]
+
+    for i, inc in enumerate(includes):
+        includedir = project_root / pathlib.PurePosixPath(inc)
+        rel = relpath_walk_up(includedir, pc_install_path)
+        pc_content.append(f"inc{i}=${{prefix}}/{rel.as_posix()}")
+        cflags.append(f"-I${{inc{i}}}")
+
+    pc_content += [
+        "",
+        f"Name: {name}",
+        f"Description: {desc}",
+        f"Version: {version}",
+        "Cflags: " + " ".join(cflags),
+    ]
+
+    if depends:
+        requires = " ".join(depends)
+        pc_content.append(f"Requires: {requires}")
+
+    pc_content.append("")
+
+    with open(pcfile, "w") as fp:
+        fp.write("\n".join(pc_content))
diff --git a/robotpy_build/overrides.py b/src/semiwrap/overrides.py
similarity index 100%
rename from robotpy_build/overrides.py
rename to src/semiwrap/overrides.py
diff --git a/src/semiwrap/pkgconf_cache.py b/src/semiwrap/pkgconf_cache.py
new file mode 100644
index 00000000..806607f0
--- /dev/null
+++ b/src/semiwrap/pkgconf_cache.py
@@ -0,0 +1,116 @@
+import pathlib
+import shlex
+import typing as T
+
+import pkgconf
+
+
+INITPY_VARNAME = "pkgconf_pypi_initpy"
+
+
+class CacheEntry:
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+        self.manual = False
+
+    def _get_pkgconf_data(self, *args) -> str:
+        r = pkgconf.run_pkgconf(self.name, *args, capture_output=True)
+        if r.returncode != 0:
+            raise KeyError(f"Package '{self.name}' is not installed")
+
+        return r.stdout.decode("utf-8").strip()
+
+    @property
+    def requires(self) -> T.List[str]:
+        if not hasattr(self, "_requires"):
+            raw = self._get_pkgconf_data("--print-requires")
+            self._requires = [r for r in raw.split("\n") if r]
+
+        return self._requires
+
+    @property
+    def include_path(self) -> T.List[pathlib.Path]:
+        """Only the include path for this package"""
+        if not hasattr(self, "_include_path"):
+            include_path = []
+            raw = self._get_pkgconf_data(
+                "--cflags-only-I", "--maximum-traverse-depth=1"
+            )
+            for i in shlex.split(raw):
+                assert i.startswith("-I")
+                include_path.append(pathlib.Path(i[2:]).absolute())
+
+            self._include_path = include_path
+
+        return self._include_path
+
+    @property
+    def full_include_path(self) -> T.List[pathlib.Path]:
+        """Include path for this package and requirements"""
+        if not hasattr(self, "_full_include_path"):
+            full_include_path = []
+            raw = self._get_pkgconf_data("--cflags-only-I")
+            for i in shlex.split(raw):
+                assert i.startswith("-I")
+                full_include_path.append(pathlib.Path(i[2:]).absolute())
+
+            self._full_include_path = full_include_path
+
+        return self._full_include_path
+
+    @property
+    def type_casters_path(self) -> T.Optional[pathlib.Path]:
+        if not hasattr(self, "_type_casters_path"):
+            raw = self._get_pkgconf_data("--path")
+            pc_path = pathlib.Path(raw)
+            type_caster_cfg = pc_path.with_suffix(".pybind11.json")
+            if type_caster_cfg.exists():
+                self._type_casters_path = type_caster_cfg
+            else:
+                self._type_casters_path = None
+
+        return self._type_casters_path
+
+    @property
+    def libinit_py(self) -> T.Optional[str]:
+        if not hasattr(self, "_libinit_py"):
+            raw = self._get_pkgconf_data(f"--variable={INITPY_VARNAME}")
+            if raw:
+                self._libinit_py = raw
+            else:
+                self._libinit_py = None
+
+        return self._libinit_py
+
+
+class PkgconfCache:
+    def __init__(self) -> None:
+        self._cache: T.Dict[str, CacheEntry] = {}
+
+    def add_local(
+        self,
+        name: str,
+        includes: T.List[pathlib.Path],
+        requires: T.List[str],
+        libinit_py: T.Optional[str] = None,
+    ) -> CacheEntry:
+        assert name not in self._cache
+        entry = CacheEntry(name)
+        entry.manual = True
+        entry._include_path = [inc.absolute() for inc in includes]
+        entry._full_include_path = entry._include_path[:]
+        entry._libinit_py = libinit_py
+        entry._type_casters_path = None
+        entry._requires = requires[:]
+        for req in requires:
+            dep = self.get(req)
+            entry._full_include_path.extend(dep.full_include_path)
+        self._cache[name] = entry
+        return entry
+
+    def get(self, depname: str) -> CacheEntry:
+        entry = self._cache.get(depname)
+        if entry is None:
+            self._cache[depname] = entry = CacheEntry(depname)
+        return entry
diff --git a/robotpy_build/platforms.py b/src/semiwrap/platforms.py
similarity index 93%
rename from robotpy_build/platforms.py
rename to src/semiwrap/platforms.py
index dc474e12..824a94c2 100644
--- a/robotpy_build/platforms.py
+++ b/src/semiwrap/platforms.py
@@ -1,4 +1,8 @@
-from distutils.util import get_platform as _get_platform
+try:
+    from distutils.util import get_platform as _get_platform
+except ImportError:
+    from sysconfig import get_platform as _get_platform
+
 from dataclasses import dataclass, field
 from typing import List
 import re
@@ -99,7 +103,7 @@ def get_platform(name: typing.Optional[str] = None) -> WPILibMavenPlatform:
     try:
         return _platforms[name]
     except KeyError:
-        raise KeyError(f"platform {name} is not supported by robotpy-build!")
+        raise KeyError(f"platform {name} is not supported by semiwrap!")
 
 
 def get_platform_override_keys(platform: WPILibMavenPlatform):
diff --git a/robotpy_build/pybind11 b/src/semiwrap/pybind11
similarity index 100%
rename from robotpy_build/pybind11
rename to src/semiwrap/pybind11
diff --git a/src/semiwrap/pyproject.py b/src/semiwrap/pyproject.py
new file mode 100644
index 00000000..81f15fd7
--- /dev/null
+++ b/src/semiwrap/pyproject.py
@@ -0,0 +1,87 @@
+import pathlib
+import typing as T
+
+import tomli
+
+from .config.util import parse_input
+from .config.pyproject_toml import ExtensionModuleConfig, SemiwrapToolConfig
+from .overrides import apply_overrides
+from .platforms import get_platform, get_platform_override_keys
+
+
+class PyProject:
+    def __init__(self, pyproject_path: T.Optional[pathlib.Path] = None) -> None:
+
+        if pyproject_path:
+            self.root = pyproject_path.parent.resolve()
+        else:
+            self.root = pathlib.Path().resolve()
+
+        self._platform = None
+        self._project = None
+        self._package_root = None
+
+        self._all_deps = None
+
+    @property
+    def package_root(self) -> pathlib.Path:
+        if self._package_root is None:
+            # try to detect packages based on the extension modules
+            for package_name in self.project.extension_modules.keys():
+                parent = pathlib.Path(*package_name.split(".")[:-1])
+                if (self.root / parent / "__init__.py").exists():
+                    self._package_root = self.root
+                    break
+                elif (self.root / "src" / parent / "__init__.py").exists():
+                    self._package_root = self.root / "src"
+                    break
+            else:
+                raise ValueError("Cannot determine package root")
+
+        return self._package_root
+
+    @property
+    def platform(self):
+        if self._platform is None:
+            self._platform = get_platform()
+        return self._platform
+
+    @property
+    def project(self):
+        if self._project is None:
+            project_fname = self.root / "pyproject.toml"
+
+            try:
+                with open(project_fname, "rb") as fp:
+                    self.pyproject = tomli.load(fp)
+            except FileNotFoundError as e:
+                raise ValueError("current directory is not a semiwrap project") from e
+
+            self.project_dict = self.pyproject.get("tool", {}).get("semiwrap", {})
+
+            # Overrides are applied before pydantic does processing, so that
+            # we can easily override anything without needing to make the
+            # pydantic schemas messy with needless details
+            override_keys = get_platform_override_keys(self.platform)
+            apply_overrides(self.project_dict, override_keys)
+
+            try:
+                self._project = parse_input(
+                    self.project_dict, SemiwrapToolConfig, project_fname
+                )
+            except Exception as e:
+                raise ValueError(
+                    f"semiwrap configuration in pyproject.toml is incorrect"
+                ) from e
+        return self._project
+
+    def get_extension(self, module_package_name: str) -> ExtensionModuleConfig:
+        return self.project.extension_modules[module_package_name]
+
+    def get_extension_deps(self, extension: ExtensionModuleConfig) -> T.List[str]:
+        deps = []
+        for wrap in extension.wraps:
+            if wrap not in extension.depends:
+                deps.append(wrap)
+        deps.extend(extension.depends)
+        return deps
diff --git a/src/semiwrap/semiwrap-pybind11.pc b/src/semiwrap/semiwrap-pybind11.pc
new file mode 100644
index 00000000..0bd54384
--- /dev/null
+++ b/src/semiwrap/semiwrap-pybind11.pc
@@ -0,0 +1,8 @@
+prefix=${pcfiledir}
+includedir=${prefix}/pybind11/include
+
+Name: semiwrap-pybind11
+Description: Semiwrap specific version of pybind11
+Version: 3.0.0.dev1
+Cflags: -I${includedir} -DPYBIND11_USE_SMART_HOLDER_AS_DEFAULT
+Libs:
diff --git a/src/semiwrap/semiwrap.pc b/src/semiwrap/semiwrap.pc
new file mode 100644
index 00000000..cb7ca174
--- /dev/null
+++ b/src/semiwrap/semiwrap.pc
@@ -0,0 +1,9 @@
+prefix=${pcfiledir}
+includedir=${prefix}/include
+
+Name: semiwrap
+Description: Semiwrap compile flags and include directories
+Version: 2025
+Requires: semiwrap-pybind11
+Cflags: -I${includedir}
+Libs:
diff --git a/src/semiwrap/semiwrap.pybind11.json b/src/semiwrap/semiwrap.pybind11.json
new file mode 100644
index 00000000..e1cf1604
--- /dev/null
+++ b/src/semiwrap/semiwrap.pybind11.json
@@ -0,0 +1,65 @@
+{
+    "headers": [
+        {
+            "header": "pybind11/stl.h",
+            "types": [
+                "std::vector",
+                "std::deque",
+                "std::list",
+                "std::array",
+                "std::valarray",
+                "std::set",
+                "std::map",
+                "std::unordered_map",
+                "std::optional",
+                "std::nullopt_t",
+                "std::variant"
+            ]
+        },
+        {
+            "header": "pybind11/functional.h",
+            "types": [
+                "std::function"
+            ]
+        },
+        {
+            "header": "pybind11/complex.h",
+            "types": [
+                "std::complex"
+            ]
+        },
+        {
+            "header": "pybind11/chrono.h",
+            "types": [
+                "std::chrono::duration",
+                "std::chrono::time_point"
+            ]
+        },
+        {
+            "header": "pybind11/eigen.h",
+            "types": [
+                "Eigen::Block",
+                "Eigen::DiagonalMatrix",
+                "Eigen::MatrixBase",
+                "Eigen::Matrix",
+                "Eigen::Matrix2d",
+                "Eigen::Matrix3d",
+                "Eigen::MatrixXcd",
+                "Eigen::MatrixXd",
+                "Eigen::MatrixXdR",
+                "Eigen::MatrixXi",
+                "Eigen::MatrixXf",
+                "Eigen::Ref",
+                "Eigen::Matrix4d",
+                "Eigen::RowVectorXf",
+                "Eigen::SparseMatrix",
+                "Eigen::SparseView",
+                "Eigen::Vector",
+                "Eigen::Vector2d",
+                "Eigen::Vector3d",
+                "Eigen::VectorXf",
+                "Eigen::VectorXcf"
+            ]
+        }
+    ]
+}
\ No newline at end of file
diff --git a/robotpy_build/tool.py b/src/semiwrap/tool.py
similarity index 100%
rename from robotpy_build/tool.py
rename to src/semiwrap/tool.py
diff --git a/src/semiwrap/util.py b/src/semiwrap/util.py
new file mode 100644
index 00000000..fa3920c6
--- /dev/null
+++ b/src/semiwrap/util.py
@@ -0,0 +1,27 @@
+import os.path
+import pathlib
+import typing as T
+
+
+def maybe_write_file(
+    path: pathlib.Path, content: str, *, encoding: T.Optional[str] = None
+) -> bool:
+    # returns True if new content written
+    if path.exists():
+        with open(path, encoding=encoding) as fp:
+            oldcontent = fp.read()
+        if oldcontent == content:
+            return False
+    elif not path.parent.exists():
+        path.parent.mkdir(parents=True)
+
+    with open(path, "w", encoding=encoding) as fp:
+        fp.write(content)
+
+    return True
+
+
+def relpath_walk_up(p: pathlib.Path, other: pathlib.Path) -> pathlib.Path:
+    # walk_up=True was introduced in Python 3.12 so can't use that
+    #   p.relative_to(other, walk_up=True)
+    return pathlib.Path(os.path.relpath(p, other))
diff --git a/tests/cpp/.gitignore b/tests/cpp/.gitignore
index 9dc347fd..a65f7f1a 100644
--- a/tests/cpp/.gitignore
+++ b/tests/cpp/.gitignore
@@ -1,6 +1,8 @@
 
 *.py[ciod]
 
+*.pc
+*.pybind11.json
 *.so
 *.dll
 *.pyd
@@ -8,28 +10,9 @@
 py.typed
 
 
-/build
-/dist
-/pip-wheel-metadata
+build
+dist
+trampolines
 
-# autogenerated from template
-/pyproject.toml
+version.py
 
-/rpytest/version.py
-
-/rpytest/dl/_init_rpytest_dl.py
-/rpytest/dl/pkgcfg.py
-/rpytest/dl/include
-/rpytest/dl/rpy-include
-
-/rpytest/ft/_init_rpytest_ft.py
-/rpytest/ft/pkgcfg.py
-/rpytest/ft/rpy-include
-
-/rpytest/srconly/_init_rpytest_srconly.py
-/rpytest/srconly/pkgcfg.py
-/rpytest/srconly/include
-/rpytest/srconly/rpy-include
-
-/rpytest/tc/_init_rpytest_tc.py
-/rpytest/tc/pkgcfg.py
\ No newline at end of file
diff --git a/tests/cpp/dl/downloaded.cpp b/tests/cpp/dl/downloaded.cpp
deleted file mode 100644
index 42af69a5..00000000
--- a/tests/cpp/dl/downloaded.cpp
+++ /dev/null
@@ -1,6 +0,0 @@
-
-#include "downloaded.h"
-
-int downloaded_fn(int val) {
-    return 0x42 + val;
-}
diff --git a/tests/cpp/dl/downloaded.h b/tests/cpp/dl/downloaded.h
deleted file mode 100644
index a7a7cdd1..00000000
--- a/tests/cpp/dl/downloaded.h
+++ /dev/null
@@ -1,4 +0,0 @@
-
-#pragma once 
-
-int downloaded_fn(int val);
diff --git a/tests/cpp/gen/dl/downloaded.yml b/tests/cpp/gen/dl/downloaded.yml
deleted file mode 100644
index b0ba34ae..00000000
--- a/tests/cpp/gen/dl/downloaded.yml
+++ /dev/null
@@ -1,4 +0,0 @@
----
-
-functions:
-  downloaded_fn:
diff --git a/tests/cpp/gen/ft/buffers.yml b/tests/cpp/gen/ft/buffers.yml
deleted file mode 100644
index 10ee3590..00000000
--- a/tests/cpp/gen/ft/buffers.yml
+++ /dev/null
@@ -1,28 +0,0 @@
----
-
-classes:
-  Buffers:
-    methods:
-      set_buffer:
-        buffers:
-        - { type: in, src: data, len: len }
-      get_buffer2:
-        buffers:
-        - { type: out, src: data, len: len }
-      get_buffer1:
-        buffers:
-        - { type: out, src: data, len: len }
-      inout_buffer:
-        buffers:
-        - { type: in, src: indata, len: size }
-        - { type: out, src: outdata, len: size }
-
-      v_set_buffer:
-        buffers:
-        - { type: in, src: data, len: len }
-      v_get_buffer2:
-        buffers:
-        - { type: out, src: data, len: len }
-      v_get_buffer1:
-        buffers:
-        - { type: out, src: data, len: len }
\ No newline at end of file
diff --git a/tests/cpp/gen/ft/using.yml b/tests/cpp/gen/ft/using.yml
deleted file mode 100644
index e426a47e..00000000
--- a/tests/cpp/gen/ft/using.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-typealias:
-- cr::AlsoCantResolve
-
-classes:
-  ProtectedUsing:
-    typealias:
-    - cr::CantResolve
\ No newline at end of file
diff --git a/tests/cpp/pyproject.toml.tmpl b/tests/cpp/pyproject.toml.tmpl
deleted file mode 100644
index f22d1a3b..00000000
--- a/tests/cpp/pyproject.toml.tmpl
+++ /dev/null
@@ -1,140 +0,0 @@
-[build-system]
-requires = ["robotpy-build"]
-
-[tool.robotpy-build]
-base_package = "rpytest"
-
-[tool.robotpy-build.wrappers."rpytest.dl".maven_lib_download]
-artifact_id = "dl"
-group_id = "fake.dl"
-repo_url = "http://127.0.0.1:RANDOM_PORT"
-version = "1.2.3"
-use_sources = true
-sources = [
-    "downloaded.cpp"
-]
-
-[[tool.robotpy-build.wrappers."rpytest.dl".maven_lib_download.header_patches]]
-patch = "rpytest/dl/dl.patch"
-strip = 0
-
-[tool.robotpy-build.wrappers."rpytest.dl"]
-name = "rpytest_dl"
-
-sources = [
-    "rpytest/dl/dl.cpp"
-]
-
-generation_data = "gen/dl"
-
-[tool.robotpy-build.wrappers."rpytest.dl".autogen_headers]
-downloaded = "downloaded.h"
-
-[tool.robotpy-build.wrappers."rpytest.ft"]
-name = "rpytest_ft"
-depends = ["rpytest_tc"]
-
-sources = [
-    "rpytest/ft/src/fields.cpp",
-    "rpytest/ft/src/ft.cpp",
-    "rpytest/ft/src/using2.cpp",
-]
-
-generation_data = "gen/ft"
-generate = [
-    { abstract = "abstract.h" },
-    { base_qualname = "base_qualname.h" },
-    { base_qualname_hidden = "base_qualname_hidden.h" },
-    { buffers = "buffers.h" },
-    { custom_type_caster = "custom_type_caster.h" },
-    { defaults = "defaults.h" },
-    { docstrings = "docstrings.h" },
-    { docstrings_append = "docstrings_append.h" },
-    { enums = "enums.h" },
-    { factory = "factory.h" },
-    { fields = "fields.h" },
-    { gilsafe_container = "gilsafe_container.h" },
-    { keepalive = "keepalive.h" },
-    { ignore = "ignore.h" },
-    { ignored_by_default = "ignored_by_default.h" },
-    { inline_code = "inline_code.h" },
-    { lifetime = "lifetime.h" },
-    { nested = "nested.h" },
-    { ns_class = "ns_class.h" },
-    { ns_hidden = "ns_hidden.h" },
-    { operators = "operators.h" },
-    { overloads = "overloads.h" },
-    { parameters = "parameters.h" },
-    { refqual = "refqual.h" },
-    { rename = "rename.h" },
-    { retval = "retval.h" },
-    { subpkg = "subpkg.h" },
-    { static_only = "static_only.h" },
-    { trampoline = "trampoline.h" },
-    { type_caster = "type_caster.h" },
-    { type_caster_nested = "type_caster_nested.h" },
-    { using = "using.h" },
-    { using2 = "using2.h" },
-    { virtual_comma = "virtual_comma.h" },
-    { virtual_xform = "virtual_xform.h" },
-
-    # Inheritance
-    { IBase = "inheritance/ibase.h" },
-    { IChild = "inheritance/ichild.h" },
-    { IMChild = "inheritance/imchild.h" },
-    { IGChild = "inheritance/igchild.h" },
-    { Overloaded = "inheritance/overloaded.h" },
-    { mvi = "inheritance/mvi.h" },
-    { usingparent = "inheritance/usingparent.h" },
-
-    # Protection
-    { PBase = "protection/pbase.h" },
-    { PChild = "protection/pchild.h" },
-    { PGChild = "protection/pgchild.h" },
-
-    # Templates
-    {tbase = "templates/tbase.h"},
-    {tcrtp = "templates/tcrtp.h"},
-    {tcrtpfwd = "templates/tcrtpfwd.h"},
-    {tconcrete = "templates/tconcrete.h"},
-
-    {tvbase = "templates/tvbase.h"},
-    {tvchild = "templates/tvchild.h"},
-
-    {tbasic = "templates/basic.h"},
-    {tdependent_base = "templates/dependent_base.h"},
-    {tdependent_param = "templates/dependent_param.h"},
-    {tdependent_using = "templates/dependent_using.h"},
-    {tdependent_using2 = "templates/dependent_using2.h"},
-    {tfn = "templates/fn.h" },
-    {tnumeric = "templates/numeric.h"},
-    {tnested = "templates/nested.h"},
-]
-
-[tool.robotpy-build.wrappers."rpytest.srconly"]
-name = "rpytest_srconly"
-sources = [
-    "rpytest/srconly/srconly.cpp"
-]
-
-[tool.robotpy-build.wrappers."rpytest.tc"]
-name = "rpytest_tc"
-
-[[tool.robotpy-build.wrappers."rpytest.tc".type_casters]]
-header = "rpyint_type_caster.h"
-types = ["rpy::rpyint"]
-default_arg_cast = true
-
-[[tool.robotpy-build.wrappers."rpytest.tc".type_casters]]
-header = "rpyint_type_caster.h"
-types = ["rpyint_plus_5"]
-
-
-[tool.robotpy-build.metadata]
-name = "robotpy-build-test"
-description = "robotpy-build test program"
-author = "RobotPy Development Team"
-author_email = "robotpy@googlegroups.com"
-url = "https://github.com/robotpy/robotpy-build"
-license = "BSD-3-Clause"
-install_requires = []
\ No newline at end of file
diff --git a/tests/cpp/rpytest/__init__.py b/tests/cpp/rpytest/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/cpp/rpytest/dl/__init__.py b/tests/cpp/rpytest/dl/__init__.py
deleted file mode 100644
index acced46d..00000000
--- a/tests/cpp/rpytest/dl/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from . import _init_rpytest_dl
-
-# autogenerated by 'robotpy-build create-imports rpytest.dl rpytest.dl._rpytest_dl'
-from ._rpytest_dl import downloaded_fn, extra_content
-
-__all__ = ["downloaded_fn", "extra_content"]
diff --git a/tests/cpp/rpytest/dl/dl.cpp b/tests/cpp/rpytest/dl/dl.cpp
deleted file mode 100644
index c7e6095e..00000000
--- a/tests/cpp/rpytest/dl/dl.cpp
+++ /dev/null
@@ -1,3 +0,0 @@
-#include <rpygen_wrapper.hpp>
-
-RPYBUILD_PYBIND11_MODULE(m) { initWrapper(m); }
\ No newline at end of file
diff --git a/tests/cpp/rpytest/dl/dl.patch b/tests/cpp/rpytest/dl/dl.patch
deleted file mode 100644
index ce7fabed..00000000
--- a/tests/cpp/rpytest/dl/dl.patch
+++ /dev/null
@@ -1,10 +0,0 @@
---- downloaded.h.old	2022-02-23 23:09:51.053000000 -0500
-+++ downloaded.h	2022-02-23 23:10:13.861277275 -0500
-@@ -1,4 +1,6 @@
- 
--#pragma once 
-+#pragma once
-+
-+inline bool extra_content() { return true; }
- 
- int downloaded_fn(int val);
diff --git a/tests/cpp/rpytest/ft/subpkg.py b/tests/cpp/rpytest/ft/subpkg.py
deleted file mode 100644
index 09251e93..00000000
--- a/tests/cpp/rpytest/ft/subpkg.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# autogenerated by 'robotpy-build create-imports rpytest.ft rpytest.ft._rpytest_ft.subpkg'
-from ._rpytest_ft.subpkg import SPClass, sp_func
-
-__all__ = ["SPClass", "sp_func"]
diff --git a/tests/cpp/rpytest/srconly/__init__.py b/tests/cpp/rpytest/srconly/__init__.py
deleted file mode 100644
index 91e56528..00000000
--- a/tests/cpp/rpytest/srconly/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# autogenerated by 'robotpy-build create-imports rpytest.srconly rpytest.srconly._rpytest_srconly'
-from ._rpytest_srconly import srconly_fn
-
-__all__ = ["srconly_fn"]
diff --git a/tests/cpp/rpytest/srconly/srconly.cpp b/tests/cpp/rpytest/srconly/srconly.cpp
deleted file mode 100644
index 0a101eae..00000000
--- a/tests/cpp/rpytest/srconly/srconly.cpp
+++ /dev/null
@@ -1,10 +0,0 @@
-
-#include <robotpy_build.h>
-
-int srconly_fn(int val) {
-    return val - 0x42;
-}
-
-RPYBUILD_PYBIND11_MODULE(m) {
-    m.def("srconly_fn", &srconly_fn);
-}
\ No newline at end of file
diff --git a/tests/cpp/run_install.py b/tests/cpp/run_install.py
index 2b45723c..717f9395 100755
--- a/tests/cpp/run_install.py
+++ b/tests/cpp/run_install.py
@@ -1,76 +1,41 @@
 #!/usr/bin/env python3
 
-import http.server
 import os
 from os.path import abspath, dirname, join
 import pathlib
 import shutil
 import sys
 import subprocess
-import threading
 import tempfile
 import zipfile
 
 
-def create_artifact_path(path, group_id, artifact_id, version, classifier):
-    components = group_id.split(".") + [artifact_id, version]
-    path = join(path, *components)
-    os.makedirs(path, exist_ok=True)
-    fname = f"{artifact_id}-{version}-{classifier}.zip"
-    return join(path, fname)
-
-
-def http_server():
-    httpd = http.server.HTTPServer(
-        ("127.0.0.1", 0), http.server.SimpleHTTPRequestHandler
-    )
-
-    t = threading.Thread(target=httpd.serve_forever, daemon=True)
-    t.start()
-
-    return httpd, httpd.socket.getsockname()[1]
+def subprocess_must_run(*args, **kwargs):
+    """Run a subprocess verbosely and exit if there is an error"""
+    try:
+        print("+", *args[0])
+        subprocess.run(check=True, *args, **kwargs)
+    except subprocess.CalledProcessError as cbe:
+        print(cbe, file=sys.stderr)
+        sys.exit(cbe.returncode)
 
 
 if __name__ == "__main__":
     root = abspath(dirname(__file__))
     os.chdir(root)
 
-    # delete build/cache directory
-    shutil.rmtree(join(root, "build", "cache"), ignore_errors=True)
-
-    # create tempdir with maven directory structure for pkg
-    with tempfile.TemporaryDirectory() as d:
-        # create headers and sources zip files
-        hname = create_artifact_path(d, "fake.dl", "dl", "1.2.3", "headers")
-        with zipfile.ZipFile(hname, "w") as z:
-            z.write(join(root, "dl", "downloaded.h"), "downloaded.h")
-
-        sname = create_artifact_path(d, "fake.dl", "dl", "1.2.3", "sources")
-        with zipfile.ZipFile(sname, "w") as z:
-            z.write(join(root, "dl", "downloaded.cpp"), "downloaded.cpp")
-
-        # http.server prior to 3.7 could only serve the current directory
-        os.chdir(d)
+    to_install = ["sw-test-base", "sw-caster-consumer", "sw-test"]
 
-        # start http server on random port
-        httpd, port = http_server()
-
-        with open(join(root, "pyproject.toml.tmpl")) as rfp:
-            content = rfp.read().replace("RANDOM_PORT", str(port))
-            with open(join(root, "pyproject.toml"), "w") as wfp:
-                wfp.write(content)
-
-        cwd = None
+    # First, uninstall packages
+    subprocess_must_run(
+        [sys.executable, "-m", "pip", "--disable-pip-version-check", "uninstall", "-y"]
+        + to_install
+    )
 
-        if len(sys.argv) == 2 and sys.argv[1] == "wheel":
-            cmd_args = [sys.executable, "-m", "build", "--wheel", "--no-isolation"]
-            cwd = root
-        elif len(sys.argv) == 2 and sys.argv[1] == "develop":
-            cmd_args = [sys.executable, "setup.py", "develop", "-N"]
-            cwd = root
-        else:
-            # run pip install
-            cmd_args = [
+    # Now install them
+    for pkg in to_install:
+        subprocess_must_run(
+            [
                 sys.executable,
                 "-m",
                 "pip",
@@ -78,17 +43,7 @@ def http_server():
                 "--disable-pip-version-check",
                 "install",
                 "--no-build-isolation",
+                os.path.abspath(pkg),
             ]
-
-            if len(sys.argv) == 2 and sys.argv[1] == "-e":
-                cmd_args.append("-e")
-
-            cmd_args.append(root)
-
-        env = os.environ.copy()
-        env["SETUPTOOLS_SCM_PRETEND_VERSION"] = "0.0.1"
-
-        subprocess.check_call(cmd_args, cwd=cwd, env=env)
-
-        # Windows fails if you try to delete the directory you're currently in
-        os.chdir(root)
+            + sys.argv[1:]
+        )
diff --git a/tests/cpp/setup.py b/tests/cpp/setup.py
deleted file mode 100755
index 3542d0c9..00000000
--- a/tests/cpp/setup.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python3
-
-from robotpy_build.setup import setup
-
-setup()
diff --git a/tests/cpp/sw-caster-consumer/meson.build b/tests/cpp/sw-caster-consumer/meson.build
new file mode 100644
index 00000000..1bec68f8
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/meson.build
@@ -0,0 +1,11 @@
+project('sw-caster-consumer', ['cpp'],
+        default_options: ['warning_level=1', 'cpp_std=c++20',
+                          'b_colorout=auto', 'optimization=2', 'b_pie=true'])
+
+subdir('wrapcfg')
+
+sw_caster_consumer__module_sources += files(
+  'src/sw_caster_consumer/cpp/main.cpp',
+)
+
+subdir('wrapcfg/modules')
diff --git a/tests/cpp/sw-caster-consumer/pyproject.toml b/tests/cpp/sw-caster-consumer/pyproject.toml
new file mode 100644
index 00000000..9d9afcd7
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/pyproject.toml
@@ -0,0 +1,29 @@
+[build-system]
+build-backend = "hatchling.build"
+requires = ["semiwrap", "hatch-meson", "hatchling"]
+
+[project]
+description = "Test program"
+name = "sw-caster-consumer"
+version = "0.0.1"
+
+[tool.hatch.build.targets.wheel]
+packages = ['src/sw_caster_consumer']
+
+[tool.hatch.build.hooks.semiwrap]
+
+[tool.hatch.build.hooks.meson]
+
+#
+# Semiwrap configuration
+#
+
+[tool.semiwrap]
+[tool.semiwrap.extension_modules."sw_caster_consumer._module"]
+depends = ["swtest_base__module"]
+
+[tool.semiwrap.extension_modules."sw_caster_consumer._module".headers]
+more = "cpp/more.h"
+
+# checks that caster is included automatically when in a namespace
+ns_more = "cpp/ns_more.h"
\ No newline at end of file
diff --git a/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/__init__.py b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/__init__.py
new file mode 100644
index 00000000..7d849c08
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/__init__.py
@@ -0,0 +1 @@
+from ._module import add_more_to_inty
diff --git a/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/main.cpp b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/main.cpp
new file mode 100644
index 00000000..ac260f64
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/main.cpp
@@ -0,0 +1,5 @@
+#include <semiwrap_init.sw_caster_consumer._module.hpp>
+
+SEMIWRAP_PYBIND11_MODULE(m) {
+    initWrapper(m);
+}
diff --git a/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/more.h b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/more.h
new file mode 100644
index 00000000..e83312ed
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/more.h
@@ -0,0 +1,8 @@
+#pragma once
+
+#include <inty.h>
+
+inline inty add_more_to_inty(inty v, long value) {
+    v.long_value += value;
+    return v;
+}
diff --git a/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/ns_more.h b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/ns_more.h
new file mode 100644
index 00000000..39417ee6
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/src/sw_caster_consumer/cpp/ns_more.h
@@ -0,0 +1,12 @@
+#pragma once
+
+#include <ns_inty.h>
+
+namespace ns {
+
+inline inty2 add_more_to_inty2(inty2 v, long value) {
+    v.long_value += value;
+    return v;
+}
+
+}
diff --git a/tests/cpp/sw-caster-consumer/wrapcfg/.gitignore b/tests/cpp/sw-caster-consumer/wrapcfg/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/wrapcfg/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-caster-consumer/wrapcfg/modules/.gitignore b/tests/cpp/sw-caster-consumer/wrapcfg/modules/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/wrapcfg/modules/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-caster-consumer/wrapcfg/more.yml b/tests/cpp/sw-caster-consumer/wrapcfg/more.yml
new file mode 100644
index 00000000..f9dafd81
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/wrapcfg/more.yml
@@ -0,0 +1,4 @@
+---
+
+functions:
+  add_more_to_inty:
diff --git a/tests/cpp/sw-caster-consumer/wrapcfg/ns_more.yml b/tests/cpp/sw-caster-consumer/wrapcfg/ns_more.yml
new file mode 100644
index 00000000..a9b89c3f
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/wrapcfg/ns_more.yml
@@ -0,0 +1,4 @@
+---
+
+functions:
+  add_more_to_inty2:
diff --git a/tests/cpp/sw-caster-consumer/wrapcfg/trampolines/.gitignore b/tests/cpp/sw-caster-consumer/wrapcfg/trampolines/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-caster-consumer/wrapcfg/trampolines/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test-base/.gitignore b/tests/cpp/sw-test-base/.gitignore
new file mode 100644
index 00000000..3b0e1720
--- /dev/null
+++ b/tests/cpp/sw-test-base/.gitignore
@@ -0,0 +1,3 @@
+/src/swtest_base/sw-test-base-casters.pc
+/src/swtest_base/sw-test-base-casters.pybind11.json
+/src/swtest_base/trampolines
\ No newline at end of file
diff --git a/tests/cpp/sw-test-base/meson.build b/tests/cpp/sw-test-base/meson.build
new file mode 100644
index 00000000..30ce2dc4
--- /dev/null
+++ b/tests/cpp/sw-test-base/meson.build
@@ -0,0 +1,19 @@
+project('swtest-base', ['cpp'],
+        default_options: ['warning_level=1', 'cpp_std=c++20',
+                          'b_colorout=auto', 'optimization=2', 'b_pie=true'])
+
+subdir('wrapcfg')
+
+swtest_base__module_sources += files(
+  'src/swtest_base/cpp/main.cpp',
+)
+
+swtest_base__module2_sources += files(
+  'src/swtest_base/cpp/main2.cpp',
+)
+
+swtest_base__module3_sources += files(
+  'src/swtest_base/cpp/main3.cpp',
+)
+
+subdir('wrapcfg/modules')
diff --git a/tests/cpp/sw-test-base/pyproject.toml b/tests/cpp/sw-test-base/pyproject.toml
new file mode 100644
index 00000000..35bbb60b
--- /dev/null
+++ b/tests/cpp/sw-test-base/pyproject.toml
@@ -0,0 +1,52 @@
+[build-system]
+build-backend = "hatchling.build"
+requires = ["semiwrap", "hatch-meson", "hatchling"]
+
+[project]
+description = "Test program"
+name = "sw-test-base"
+version = "0.0.1"
+
+[tool.hatch.build.targets.wheel]
+packages = ['src/swtest_base']
+
+[tool.hatch.build.hooks.semiwrap]
+
+[tool.hatch.build.hooks.meson]
+
+#
+# Semiwrap configuration
+#
+
+[tool.semiwrap]
+[tool.semiwrap.extension_modules."swtest_base._module"]
+depends = ["sw-test-base-casters"]
+includes = ["src/swtest_base/cpp"]
+
+[tool.semiwrap.extension_modules."swtest_base._module".headers]
+fn = "cpp/fn.h"
+base_class = "cpp/baseclass.h"
+
+
+[tool.semiwrap.extension_modules."swtest_base._module2"]
+depends = ["swtest_base__module", "swtest_base__module3"]
+
+[tool.semiwrap.extension_modules."swtest_base._module2".headers]
+fn2 = "cpp/fn2.h"
+
+
+[tool.semiwrap.extension_modules."swtest_base._module3"]
+# empty module to test dependency on module declared after a module
+
+
+[tool.semiwrap.export_type_casters.sw-test-base-casters]
+pypackage = "swtest_base"
+includedir = ["src/swtest_base/cpp/type_casters"]
+
+[[tool.semiwrap.export_type_casters.sw-test-base-casters.headers]]
+header = "inty_cast.h"
+types = ["inty"]
+
+[[tool.semiwrap.export_type_casters.sw-test-base-casters.headers]]
+header = "ns_inty_cast.h"
+types = ["ns::inty2"]
diff --git a/tests/cpp/sw-test-base/src/swtest_base/__init__.py b/tests/cpp/sw-test-base/src/swtest_base/__init__.py
new file mode 100644
index 00000000..2e3f4cc1
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/__init__.py
@@ -0,0 +1,4 @@
+# for type caster
+inty = int
+
+from ._module import add_to_inty
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/baseclass.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/baseclass.h
new file mode 100644
index 00000000..1b8c67c8
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/baseclass.h
@@ -0,0 +1,14 @@
+#pragma once
+
+#include <string>
+
+// tests trampoline across packages
+class abaseclass {
+public:
+    virtual ~abaseclass() = default;
+
+    inline virtual std::string fn() {
+        return "abaseclass";
+    }
+
+};
\ No newline at end of file
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/fn.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/fn.h
new file mode 100644
index 00000000..6d526314
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/fn.h
@@ -0,0 +1,8 @@
+#pragma once
+
+#include "inty.h"
+
+inline inty add_to_inty(inty v, long value) {
+    v.long_value += value;
+    return v;
+}
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/fn2.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/fn2.h
new file mode 100644
index 00000000..871b9ab2
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/fn2.h
@@ -0,0 +1,5 @@
+#pragma once
+
+int something() {
+    return 1;
+}
\ No newline at end of file
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/inty.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/inty.h
new file mode 100644
index 00000000..756b30f9
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/inty.h
@@ -0,0 +1,3 @@
+#pragma once
+
+struct inty { long long_value; };
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/main.cpp b/tests/cpp/sw-test-base/src/swtest_base/cpp/main.cpp
new file mode 100644
index 00000000..a06c0a6d
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/main.cpp
@@ -0,0 +1,5 @@
+#include <semiwrap_init.swtest_base._module.hpp>
+
+SEMIWRAP_PYBIND11_MODULE(m) {
+    initWrapper(m);
+}
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/main2.cpp b/tests/cpp/sw-test-base/src/swtest_base/cpp/main2.cpp
new file mode 100644
index 00000000..13bcf96f
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/main2.cpp
@@ -0,0 +1,5 @@
+#include <semiwrap_init.swtest_base._module2.hpp>
+
+SEMIWRAP_PYBIND11_MODULE(m) {
+    initWrapper(m);
+}
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/main3.cpp b/tests/cpp/sw-test-base/src/swtest_base/cpp/main3.cpp
new file mode 100644
index 00000000..1ff8e014
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/main3.cpp
@@ -0,0 +1,5 @@
+#include <semiwrap_init.swtest_base._module3.hpp>
+
+SEMIWRAP_PYBIND11_MODULE(m) {
+    initWrapper(m);
+}
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/ns_inty.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/ns_inty.h
new file mode 100644
index 00000000..2348f14e
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/ns_inty.h
@@ -0,0 +1,9 @@
+#pragma once
+
+namespace ns {
+
+struct inty2 { long long_value; };
+
+}
+
+
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/remote_class.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/remote_class.h
new file mode 100644
index 00000000..d4e7a9c3
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/remote_class.h
@@ -0,0 +1,4 @@
+#pragma once
+
+// Make sure we can wrap a class from a different package
+struct RemoteClass {};
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/inty_cast.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/inty_cast.h
new file mode 100644
index 00000000..67796590
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/inty_cast.h
@@ -0,0 +1,51 @@
+#pragma once
+
+//
+// From pybind11 documentation
+//
+
+#include <pybind11/pybind11.h>
+
+#include "../inty.h"
+
+namespace pybind11 { namespace detail {
+    template <> struct type_caster<inty> {
+    public:
+        /**
+         * This macro establishes the name 'inty' in
+         * function signatures and declares a local variable
+         * 'value' of type inty
+         */
+        PYBIND11_TYPE_CASTER(inty, const_name("swtest_base.inty"));
+
+        /**
+         * Conversion part 1 (Python->C++): convert a PyObject into a inty
+         * instance or return false upon failure. The second argument
+         * indicates whether implicit conversions should be applied.
+         */
+        bool load(handle src, bool) {
+            /* Extract PyObject from handle */
+            PyObject *source = src.ptr();
+            /* Try converting into a Python integer value */
+            PyObject *tmp = PyNumber_Long(source);
+            if (!tmp)
+                return false;
+            /* Now try to convert into a C++ int */
+            value.long_value = PyLong_AsLong(tmp);
+            Py_DECREF(tmp);
+            /* Ensure return code was OK (to avoid out-of-range errors etc) */
+            return !(value.long_value == -1 && !PyErr_Occurred());
+        }
+
+        /**
+         * Conversion part 2 (C++ -> Python): convert an inty instance into
+         * a Python object. The second and third arguments are used to
+         * indicate the return value policy and parent object (for
+         * ``return_value_policy::reference_internal``) and are generally
+         * ignored by implicit casters.
+         */
+        static handle cast(inty src, return_value_policy /* policy */, handle /* parent */) {
+            return PyLong_FromLong(src.long_value);
+        }
+    };
+}} // namespace pybind11::detail
diff --git a/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/ns_inty_cast.h b/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/ns_inty_cast.h
new file mode 100644
index 00000000..bdb6691f
--- /dev/null
+++ b/tests/cpp/sw-test-base/src/swtest_base/cpp/type_casters/ns_inty_cast.h
@@ -0,0 +1,51 @@
+#pragma once
+
+//
+// From pybind11 documentation
+//
+
+#include <pybind11/pybind11.h>
+
+#include "../ns_inty.h"
+
+namespace pybind11 { namespace detail {
+    template <> struct type_caster<ns::inty2> {
+    public:
+        /**
+         * This macro establishes the name 'swtest_base.inty' in
+         * function signatures and declares a local variable
+         * 'value' of type ns::inty2
+         */
+        PYBIND11_TYPE_CASTER(ns::inty2, const_name("swtest_base.inty"));
+
+        /**
+         * Conversion part 1 (Python->C++): convert a PyObject into a inty
+         * instance or return false upon failure. The second argument
+         * indicates whether implicit conversions should be applied.
+         */
+        bool load(handle src, bool) {
+            /* Extract PyObject from handle */
+            PyObject *source = src.ptr();
+            /* Try converting into a Python integer value */
+            PyObject *tmp = PyNumber_Long(source);
+            if (!tmp)
+                return false;
+            /* Now try to convert into a C++ int */
+            value.long_value = PyLong_AsLong(tmp);
+            Py_DECREF(tmp);
+            /* Ensure return code was OK (to avoid out-of-range errors etc) */
+            return !(value.long_value == -1 && !PyErr_Occurred());
+        }
+
+        /**
+         * Conversion part 2 (C++ -> Python): convert an inty instance into
+         * a Python object. The second and third arguments are used to
+         * indicate the return value policy and parent object (for
+         * ``return_value_policy::reference_internal``) and are generally
+         * ignored by implicit casters.
+         */
+        static handle cast(ns::inty2 src, return_value_policy /* policy */, handle /* parent */) {
+            return PyLong_FromLong(src.long_value);
+        }
+    };
+}} // namespace pybind11::detail
diff --git a/tests/cpp/sw-test-base/wrapcfg/.gitignore b/tests/cpp/sw-test-base/wrapcfg/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test-base/wrapcfg/base_class.yml b/tests/cpp/sw-test-base/wrapcfg/base_class.yml
new file mode 100644
index 00000000..345ab863
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/base_class.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  abaseclass:
+    methods:
+      fn:
diff --git a/tests/cpp/sw-test-base/wrapcfg/fn.yml b/tests/cpp/sw-test-base/wrapcfg/fn.yml
new file mode 100644
index 00000000..34aac061
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/fn.yml
@@ -0,0 +1,4 @@
+---
+
+functions:
+  add_to_inty:
diff --git a/tests/cpp/sw-test-base/wrapcfg/fn2.yml b/tests/cpp/sw-test-base/wrapcfg/fn2.yml
new file mode 100644
index 00000000..11ec4519
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/fn2.yml
@@ -0,0 +1,4 @@
+---
+
+functions:
+  something:
diff --git a/tests/cpp/sw-test-base/wrapcfg/modules/.gitignore b/tests/cpp/sw-test-base/wrapcfg/modules/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/modules/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test-base/wrapcfg/trampolines/.gitignore b/tests/cpp/sw-test-base/wrapcfg/trampolines/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test-base/wrapcfg/trampolines/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test/meson.build b/tests/cpp/sw-test/meson.build
new file mode 100644
index 00000000..9d074d99
--- /dev/null
+++ b/tests/cpp/sw-test/meson.build
@@ -0,0 +1,13 @@
+project('sw-test', ['cpp'],
+        default_options: ['warning_level=1', 'cpp_std=c++20',
+                          'b_colorout=auto', 'optimization=2', 'b_pie=true'])
+
+subdir('wrapcfg')
+
+swtest_ft__ft_sources += files(
+  'src/swtest/ft/src/fields.cpp',
+  'src/swtest/ft/src/ft.cpp',
+  'src/swtest/ft/src/using2.cpp',
+)
+
+subdir('wrapcfg/modules')
diff --git a/tests/cpp/sw-test/pyproject.toml b/tests/cpp/sw-test/pyproject.toml
new file mode 100644
index 00000000..0a363c13
--- /dev/null
+++ b/tests/cpp/sw-test/pyproject.toml
@@ -0,0 +1,115 @@
+[build-system]
+build-backend = "hatchling.build"
+requires = ["semiwrap", "hatch-meson", "hatchling"]
+
+[project]
+name = "sw-test"
+description = "semiwrap test program"
+version = "0.0.1"
+
+[tool.hatch.build.hooks.semiwrap]
+
+[tool.hatch.build.hooks.meson]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/swtest"]
+
+#
+# Semiwrap configuration
+#
+
+[tool.semiwrap]
+
+[tool.semiwrap.extension_modules."swtest.ft._ft"]
+depends = ["sw-test-casters"]
+wraps = ["swtest_base__module"]
+yaml_path = "wrapcfg/ft"
+includes = ["src/swtest/ft/include"]
+
+[tool.semiwrap.extension_modules."swtest.ft._ft".headers]
+abstract = "abstract.h"
+base_qualname = "base_qualname.h"
+base_qualname_hidden = "base_qualname_hidden.h"
+buffers = "buffers.h"
+custom_type_caster = "custom_type_caster.h"
+defaults = "defaults.h"
+docstrings = "docstrings.h"
+docstrings_append = "docstrings_append.h"
+enums = "enums.h"
+factory = "factory.h"
+fields = "fields.h"
+gilsafe_container = "gilsafe_container.h"
+keepalive = "keepalive.h"
+ignore = "ignore.h"
+ignored_by_default = "ignored_by_default.h"
+inline_code = "inline_code.h"
+lifetime = "lifetime.h"
+nested = "nested.h"
+ns_class = "ns_class.h"
+ns_hidden = "ns_hidden.h"
+operators = "operators.h"
+overloads = "overloads.h"
+parameters = "parameters.h"
+refqual = "refqual.h"
+remote_class = "remote_class.h"
+remote_trampoline = "remote_trampoline.h"
+rename = "rename.h"
+retval = "retval.h"
+subpkg = "subpkg.h"
+static_only = "static_only.h"
+trampoline = "trampoline.h"
+type_caster = "type_caster.h"
+type_caster_nested = "type_caster_nested.h"
+using = "using.h"
+using2 = "using2.h"
+virtual_comma = "virtual_comma.h"
+virtual_xform = "virtual_xform.h"
+
+# Inheritance
+IBase = "inheritance/ibase.h"
+IChild = "inheritance/ichild.h"
+IMChild = "inheritance/imchild.h"
+IGChild = "inheritance/igchild.h"
+Overloaded = "inheritance/overloaded.h"
+mvi = "inheritance/mvi.h"
+usingparent = "inheritance/usingparent.h"
+
+# Protection
+PBase = "protection/pbase.h"
+PChild = "protection/pchild.h"
+PGChild = "protection/pgchild.h"
+
+# Templates
+tbase = "templates/tbase.h"
+tcrtp = "templates/tcrtp.h"
+tcrtpfwd = "templates/tcrtpfwd.h"
+tconcrete = "templates/tconcrete.h"
+
+tvbase = "templates/tvbase.h"
+tvchild = "templates/tvchild.h"
+
+tbasic = "templates/basic.h"
+tdependent_base = "templates/dependent_base.h"
+tdependent_param = "templates/dependent_param.h"
+tdependent_using = "templates/dependent_using.h"
+tdependent_using2 = "templates/dependent_using2.h"
+tfn = "templates/fn.h"
+tnumeric = "templates/numeric.h"
+tnested = "templates/nested.h"
+
+
+
+[tool.semiwrap.export_type_casters.sw-test-casters]
+pypackage = "swtest.tc"
+includedir = ["src/swtest/tc/include"]
+
+[[tool.semiwrap.export_type_casters.sw-test-casters.headers]]
+header = "rpyint_type_caster.h"
+types = ["rpy::rpyint"]
+default_arg_cast = true
+
+[[tool.semiwrap.export_type_casters.sw-test-casters.headers]]
+header = "rpyint_type_caster.h"
+types = ["rpyint_plus_5"]
+
+
diff --git a/robotpy_build/config/__init__.py b/tests/cpp/sw-test/src/swtest/__init__.py
similarity index 100%
rename from robotpy_build/config/__init__.py
rename to tests/cpp/sw-test/src/swtest/__init__.py
diff --git a/tests/cpp/rpytest/ft/__init__.py b/tests/cpp/sw-test/src/swtest/ft/__init__.py
similarity index 96%
rename from tests/cpp/rpytest/ft/__init__.py
rename to tests/cpp/sw-test/src/swtest/ft/__init__.py
index 9e919f01..b04e5cb0 100644
--- a/tests/cpp/rpytest/ft/__init__.py
+++ b/tests/cpp/sw-test/src/swtest/ft/__init__.py
@@ -1,7 +1,7 @@
-from . import _init_rpytest_ft
+import swtest_base
 
-# autogenerated by 'robotpy-build create-imports rpytest.ft rpytest.ft._rpytest_ft'
-from ._rpytest_ft import (
+# autogenerated by 'robotpy-build create-imports swtest.ft swtest.ft._ft'
+from ._ft import (
     Abstract,
     Buffers,
     ClassWithFields,
diff --git a/tests/cpp/rpytest/ft/include/abstract.h b/tests/cpp/sw-test/src/swtest/ft/include/abstract.h
similarity index 87%
rename from tests/cpp/rpytest/ft/include/abstract.h
rename to tests/cpp/sw-test/src/swtest/ft/include/abstract.h
index 431aa4f4..b558aa4f 100644
--- a/tests/cpp/rpytest/ft/include/abstract.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/abstract.h
@@ -8,6 +8,7 @@ struct Abstract
 struct PrivateAbstract
 {
     PrivateAbstract() {}
+    virtual ~PrivateAbstract() = default;
 
     static int getPrivateOverride(PrivateAbstract *p) {
         return p->mustOverrideMe();
diff --git a/tests/cpp/rpytest/ft/include/base_qualname.h b/tests/cpp/sw-test/src/swtest/ft/include/base_qualname.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/base_qualname.h
rename to tests/cpp/sw-test/src/swtest/ft/include/base_qualname.h
diff --git a/tests/cpp/rpytest/ft/include/base_qualname_hidden.h b/tests/cpp/sw-test/src/swtest/ft/include/base_qualname_hidden.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/base_qualname_hidden.h
rename to tests/cpp/sw-test/src/swtest/ft/include/base_qualname_hidden.h
diff --git a/tests/cpp/rpytest/ft/include/buffers.h b/tests/cpp/sw-test/src/swtest/ft/include/buffers.h
similarity index 97%
rename from tests/cpp/rpytest/ft/include/buffers.h
rename to tests/cpp/sw-test/src/swtest/ft/include/buffers.h
index 43b0bb72..e6b25f5c 100644
--- a/tests/cpp/rpytest/ft/include/buffers.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/buffers.h
@@ -6,6 +6,8 @@
 class Buffers {
 public:
 
+    virtual ~Buffers() = default;
+
     // in
     void set_buffer(const uint8_t *data, size_t len) {
         m_buf.resize(len);
diff --git a/tests/cpp/rpytest/ft/include/custom_type_caster.h b/tests/cpp/sw-test/src/swtest/ft/include/custom_type_caster.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/custom_type_caster.h
rename to tests/cpp/sw-test/src/swtest/ft/include/custom_type_caster.h
diff --git a/tests/cpp/rpytest/ft/include/defaults.h b/tests/cpp/sw-test/src/swtest/ft/include/defaults.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/defaults.h
rename to tests/cpp/sw-test/src/swtest/ft/include/defaults.h
diff --git a/tests/cpp/rpytest/ft/include/docstrings.h b/tests/cpp/sw-test/src/swtest/ft/include/docstrings.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/docstrings.h
rename to tests/cpp/sw-test/src/swtest/ft/include/docstrings.h
diff --git a/tests/cpp/rpytest/ft/include/docstrings_append.h b/tests/cpp/sw-test/src/swtest/ft/include/docstrings_append.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/docstrings_append.h
rename to tests/cpp/sw-test/src/swtest/ft/include/docstrings_append.h
diff --git a/tests/cpp/rpytest/ft/include/enums.h b/tests/cpp/sw-test/src/swtest/ft/include/enums.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/enums.h
rename to tests/cpp/sw-test/src/swtest/ft/include/enums.h
diff --git a/tests/cpp/rpytest/ft/include/factory.h b/tests/cpp/sw-test/src/swtest/ft/include/factory.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/factory.h
rename to tests/cpp/sw-test/src/swtest/ft/include/factory.h
diff --git a/tests/cpp/rpytest/ft/include/fields.h b/tests/cpp/sw-test/src/swtest/ft/include/fields.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/fields.h
rename to tests/cpp/sw-test/src/swtest/ft/include/fields.h
diff --git a/tests/cpp/rpytest/ft/include/gilsafe_container.h b/tests/cpp/sw-test/src/swtest/ft/include/gilsafe_container.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/gilsafe_container.h
rename to tests/cpp/sw-test/src/swtest/ft/include/gilsafe_container.h
diff --git a/tests/cpp/rpytest/ft/include/ignore.h b/tests/cpp/sw-test/src/swtest/ft/include/ignore.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/ignore.h
rename to tests/cpp/sw-test/src/swtest/ft/include/ignore.h
diff --git a/tests/cpp/rpytest/ft/include/ignored_by_default.h b/tests/cpp/sw-test/src/swtest/ft/include/ignored_by_default.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/ignored_by_default.h
rename to tests/cpp/sw-test/src/swtest/ft/include/ignored_by_default.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/ibase.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/ibase.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/ibase.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/ibase.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/ichild.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/ichild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/ichild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/ichild.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/igchild.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/igchild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/igchild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/igchild.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/imchild.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/imchild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/imchild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/imchild.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/mvi.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/mvi.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/mvi.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/mvi.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/overloaded.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/overloaded.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/overloaded.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/overloaded.h
diff --git a/tests/cpp/rpytest/ft/include/inheritance/usingparent.h b/tests/cpp/sw-test/src/swtest/ft/include/inheritance/usingparent.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inheritance/usingparent.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inheritance/usingparent.h
diff --git a/tests/cpp/rpytest/ft/include/inline_code.h b/tests/cpp/sw-test/src/swtest/ft/include/inline_code.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/inline_code.h
rename to tests/cpp/sw-test/src/swtest/ft/include/inline_code.h
diff --git a/tests/cpp/rpytest/ft/include/keepalive.h b/tests/cpp/sw-test/src/swtest/ft/include/keepalive.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/keepalive.h
rename to tests/cpp/sw-test/src/swtest/ft/include/keepalive.h
diff --git a/tests/cpp/rpytest/ft/include/lifetime.h b/tests/cpp/sw-test/src/swtest/ft/include/lifetime.h
similarity index 89%
rename from tests/cpp/rpytest/ft/include/lifetime.h
rename to tests/cpp/sw-test/src/swtest/ft/include/lifetime.h
index eb04dde9..64f31a04 100644
--- a/tests/cpp/rpytest/ft/include/lifetime.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/lifetime.h
@@ -3,6 +3,8 @@
 #include <memory>
 
 struct LTWithVirtual {
+    virtual ~LTWithVirtual() = default;
+
     virtual bool get_bool() {
         return false;
     }
diff --git a/tests/cpp/rpytest/ft/include/nested.h b/tests/cpp/sw-test/src/swtest/ft/include/nested.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/nested.h
rename to tests/cpp/sw-test/src/swtest/ft/include/nested.h
diff --git a/tests/cpp/rpytest/ft/include/ns_class.h b/tests/cpp/sw-test/src/swtest/ft/include/ns_class.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/ns_class.h
rename to tests/cpp/sw-test/src/swtest/ft/include/ns_class.h
diff --git a/tests/cpp/rpytest/ft/include/ns_hidden.h b/tests/cpp/sw-test/src/swtest/ft/include/ns_hidden.h
similarity index 96%
rename from tests/cpp/rpytest/ft/include/ns_hidden.h
rename to tests/cpp/sw-test/src/swtest/ft/include/ns_hidden.h
index 75c8b4f6..53e494de 100644
--- a/tests/cpp/rpytest/ft/include/ns_hidden.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/ns_hidden.h
@@ -12,7 +12,7 @@ namespace o {
         virtual ~O() = default;
     };
 
-    class AnotherC;
+    struct AnotherC;
 };
 
 namespace n::h {
diff --git a/tests/cpp/rpytest/ft/include/operators.h b/tests/cpp/sw-test/src/swtest/ft/include/operators.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/operators.h
rename to tests/cpp/sw-test/src/swtest/ft/include/operators.h
diff --git a/tests/cpp/rpytest/ft/include/overloads.h b/tests/cpp/sw-test/src/swtest/ft/include/overloads.h
similarity index 89%
rename from tests/cpp/rpytest/ft/include/overloads.h
rename to tests/cpp/sw-test/src/swtest/ft/include/overloads.h
index b0cfdce3..836a10db 100644
--- a/tests/cpp/rpytest/ft/include/overloads.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/overloads.h
@@ -29,6 +29,11 @@ struct OverloadedObject
         return o;
     }
 
+    // checking that param override works
+    int overloaded(int a, int b, int c) {
+        return a + b + c;
+    }
+
     // This shows rtnType is inconsistent in CppHeaderParser
     const OverloadedObject& overloaded() {
         return *this;
diff --git a/tests/cpp/rpytest/ft/include/parameters.h b/tests/cpp/sw-test/src/swtest/ft/include/parameters.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/parameters.h
rename to tests/cpp/sw-test/src/swtest/ft/include/parameters.h
diff --git a/tests/cpp/rpytest/ft/include/protection/pbase.h b/tests/cpp/sw-test/src/swtest/ft/include/protection/pbase.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/protection/pbase.h
rename to tests/cpp/sw-test/src/swtest/ft/include/protection/pbase.h
diff --git a/tests/cpp/rpytest/ft/include/protection/pchild.h b/tests/cpp/sw-test/src/swtest/ft/include/protection/pchild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/protection/pchild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/protection/pchild.h
diff --git a/tests/cpp/rpytest/ft/include/protection/pgchild.h b/tests/cpp/sw-test/src/swtest/ft/include/protection/pgchild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/protection/pgchild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/protection/pgchild.h
diff --git a/tests/cpp/rpytest/ft/include/refqual.h b/tests/cpp/sw-test/src/swtest/ft/include/refqual.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/refqual.h
rename to tests/cpp/sw-test/src/swtest/ft/include/refqual.h
diff --git a/tests/cpp/sw-test/src/swtest/ft/include/remote_trampoline.h b/tests/cpp/sw-test/src/swtest/ft/include/remote_trampoline.h
new file mode 100644
index 00000000..0f3a33d9
--- /dev/null
+++ b/tests/cpp/sw-test/src/swtest/ft/include/remote_trampoline.h
@@ -0,0 +1,11 @@
+#pragma once
+
+#include "baseclass.h"
+
+class RemoteTrampoline : public abaseclass {
+public:
+
+    inline virtual std::string fn() {
+        return "RemoteTrampoline";
+    }
+};
\ No newline at end of file
diff --git a/tests/cpp/rpytest/ft/include/rename.h b/tests/cpp/sw-test/src/swtest/ft/include/rename.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/rename.h
rename to tests/cpp/sw-test/src/swtest/ft/include/rename.h
diff --git a/tests/cpp/rpytest/ft/include/retval.h b/tests/cpp/sw-test/src/swtest/ft/include/retval.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/retval.h
rename to tests/cpp/sw-test/src/swtest/ft/include/retval.h
diff --git a/tests/cpp/rpytest/ft/include/static_only.h b/tests/cpp/sw-test/src/swtest/ft/include/static_only.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/static_only.h
rename to tests/cpp/sw-test/src/swtest/ft/include/static_only.h
diff --git a/tests/cpp/rpytest/ft/include/subpkg.h b/tests/cpp/sw-test/src/swtest/ft/include/subpkg.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/subpkg.h
rename to tests/cpp/sw-test/src/swtest/ft/include/subpkg.h
diff --git a/tests/cpp/rpytest/ft/include/templates/basic.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/basic.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/basic.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/basic.h
diff --git a/tests/cpp/rpytest/ft/include/templates/dependent_base.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_base.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/dependent_base.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_base.h
diff --git a/tests/cpp/rpytest/ft/include/templates/dependent_param.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_param.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/dependent_param.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_param.h
diff --git a/tests/cpp/rpytest/ft/include/templates/dependent_using.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_using.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/dependent_using.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_using.h
diff --git a/tests/cpp/rpytest/ft/include/templates/dependent_using2.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_using2.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/dependent_using2.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/dependent_using2.h
diff --git a/tests/cpp/rpytest/ft/include/templates/fn.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/fn.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/fn.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/fn.h
diff --git a/tests/cpp/rpytest/ft/include/templates/nested.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/nested.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/nested.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/nested.h
diff --git a/tests/cpp/rpytest/ft/include/templates/numeric.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/numeric.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/numeric.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/numeric.h
diff --git a/tests/cpp/rpytest/ft/include/templates/tbase.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tbase.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/tbase.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tbase.h
diff --git a/tests/cpp/rpytest/ft/include/templates/tconcrete.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tconcrete.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/tconcrete.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tconcrete.h
diff --git a/tests/cpp/rpytest/ft/include/templates/tcrtp.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tcrtp.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/tcrtp.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tcrtp.h
diff --git a/tests/cpp/rpytest/ft/include/templates/tcrtpfwd.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tcrtpfwd.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/tcrtpfwd.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tcrtpfwd.h
diff --git a/tests/cpp/rpytest/ft/include/templates/tvbase.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tvbase.h
similarity index 87%
rename from tests/cpp/rpytest/ft/include/templates/tvbase.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tvbase.h
index be148880..e25f1e59 100644
--- a/tests/cpp/rpytest/ft/include/templates/tvbase.h
+++ b/tests/cpp/sw-test/src/swtest/ft/include/templates/tvbase.h
@@ -10,6 +10,8 @@ struct TVParam {
 template <typename T>
 struct TVBase {
 
+    virtual ~TVBase() = default;
+
     virtual std::string get(T t) const {
         return "TVBase " + std::to_string(t.get());
     }
diff --git a/tests/cpp/rpytest/ft/include/templates/tvchild.h b/tests/cpp/sw-test/src/swtest/ft/include/templates/tvchild.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/templates/tvchild.h
rename to tests/cpp/sw-test/src/swtest/ft/include/templates/tvchild.h
diff --git a/tests/cpp/rpytest/ft/include/trampoline.h b/tests/cpp/sw-test/src/swtest/ft/include/trampoline.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/trampoline.h
rename to tests/cpp/sw-test/src/swtest/ft/include/trampoline.h
diff --git a/tests/cpp/rpytest/ft/include/type_caster.h b/tests/cpp/sw-test/src/swtest/ft/include/type_caster.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/type_caster.h
rename to tests/cpp/sw-test/src/swtest/ft/include/type_caster.h
diff --git a/tests/cpp/rpytest/ft/include/type_caster_nested.h b/tests/cpp/sw-test/src/swtest/ft/include/type_caster_nested.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/type_caster_nested.h
rename to tests/cpp/sw-test/src/swtest/ft/include/type_caster_nested.h
diff --git a/tests/cpp/rpytest/ft/include/using.h b/tests/cpp/sw-test/src/swtest/ft/include/using.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/using.h
rename to tests/cpp/sw-test/src/swtest/ft/include/using.h
diff --git a/tests/cpp/rpytest/ft/include/using2.h b/tests/cpp/sw-test/src/swtest/ft/include/using2.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/using2.h
rename to tests/cpp/sw-test/src/swtest/ft/include/using2.h
diff --git a/tests/cpp/rpytest/ft/include/using_companion.h b/tests/cpp/sw-test/src/swtest/ft/include/using_companion.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/using_companion.h
rename to tests/cpp/sw-test/src/swtest/ft/include/using_companion.h
diff --git a/tests/cpp/rpytest/ft/include/virtual_comma.h b/tests/cpp/sw-test/src/swtest/ft/include/virtual_comma.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/virtual_comma.h
rename to tests/cpp/sw-test/src/swtest/ft/include/virtual_comma.h
diff --git a/tests/cpp/rpytest/ft/include/virtual_xform.h b/tests/cpp/sw-test/src/swtest/ft/include/virtual_xform.h
similarity index 100%
rename from tests/cpp/rpytest/ft/include/virtual_xform.h
rename to tests/cpp/sw-test/src/swtest/ft/include/virtual_xform.h
diff --git a/tests/cpp/rpytest/ft/src/fields.cpp b/tests/cpp/sw-test/src/swtest/ft/src/fields.cpp
similarity index 100%
rename from tests/cpp/rpytest/ft/src/fields.cpp
rename to tests/cpp/sw-test/src/swtest/ft/src/fields.cpp
diff --git a/tests/cpp/rpytest/ft/src/ft.cpp b/tests/cpp/sw-test/src/swtest/ft/src/ft.cpp
similarity index 88%
rename from tests/cpp/rpytest/ft/src/ft.cpp
rename to tests/cpp/sw-test/src/swtest/ft/src/ft.cpp
index 94c0d113..ffb75119 100644
--- a/tests/cpp/rpytest/ft/src/ft.cpp
+++ b/tests/cpp/sw-test/src/swtest/ft/src/ft.cpp
@@ -1,7 +1,7 @@
 
-#include <rpygen_wrapper.hpp>
+#include <semiwrap_init.swtest.ft._ft.hpp>
 
-RPYBUILD_PYBIND11_MODULE(m)
+SEMIWRAP_PYBIND11_MODULE(m)
 {
     initWrapper(m);
 
diff --git a/tests/cpp/rpytest/ft/src/using2.cpp b/tests/cpp/sw-test/src/swtest/ft/src/using2.cpp
similarity index 100%
rename from tests/cpp/rpytest/ft/src/using2.cpp
rename to tests/cpp/sw-test/src/swtest/ft/src/using2.cpp
diff --git a/tests/cpp/sw-test/src/swtest/ft/subpkg.py b/tests/cpp/sw-test/src/swtest/ft/subpkg.py
new file mode 100644
index 00000000..52308958
--- /dev/null
+++ b/tests/cpp/sw-test/src/swtest/ft/subpkg.py
@@ -0,0 +1,4 @@
+# autogenerated by 'robotpy-build create-imports swtest.ft swtest.ft._ft.subpkg'
+from ._ft.subpkg import SPClass, sp_func
+
+__all__ = ["SPClass", "sp_func"]
diff --git a/tests/cpp/rpytest/tc/include/rpyint.h b/tests/cpp/sw-test/src/swtest/tc/include/rpyint.h
similarity index 100%
rename from tests/cpp/rpytest/tc/include/rpyint.h
rename to tests/cpp/sw-test/src/swtest/tc/include/rpyint.h
diff --git a/tests/cpp/rpytest/tc/include/rpyint_type_caster.h b/tests/cpp/sw-test/src/swtest/tc/include/rpyint_type_caster.h
similarity index 97%
rename from tests/cpp/rpytest/tc/include/rpyint_type_caster.h
rename to tests/cpp/sw-test/src/swtest/tc/include/rpyint_type_caster.h
index 0e1cad72..f279739a 100644
--- a/tests/cpp/rpytest/tc/include/rpyint_type_caster.h
+++ b/tests/cpp/sw-test/src/swtest/tc/include/rpyint_type_caster.h
@@ -17,7 +17,7 @@ namespace pybind11 { namespace detail {
          * then you need to add a type alias or pybind11-stubgen gets
          * angry at you -- so this has to be a fully qualified name
          */
-        PYBIND11_TYPE_CASTER(rpy::rpyint, const_name("rpytest.ft.rpyint"));
+        PYBIND11_TYPE_CASTER(rpy::rpyint, const_name("swtest.ft.rpyint"));
 
         /**
          * Conversion part 1 (Python->C++): convert a PyObject into a inty
diff --git a/tests/cpp/sw-test/wrapcfg/.gitignore b/tests/cpp/sw-test/wrapcfg/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test/wrapcfg/ft/IBase.yml b/tests/cpp/sw-test/wrapcfg/ft/IBase.yml
new file mode 100644
index 00000000..faba2579
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/IBase.yml
@@ -0,0 +1,18 @@
+---
+
+classes:
+  inheritance::IBase:
+    methods:
+      IBase:
+      baseOnly:
+      baseAndGrandchild:
+      baseAndChild:
+      baseAndPyChild:
+      baseAndChildFinal:
+      getBaseOnly:
+      getBaseAndGrandchild:
+      getBaseAndChild:
+      getBaseAndPyChild:
+      getBaseAndChildFinal:
+      protectedMethod:
+      protectedOutMethod:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/IChild.yml b/tests/cpp/sw-test/wrapcfg/ft/IChild.yml
new file mode 100644
index 00000000..69ad2af5
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/IChild.yml
@@ -0,0 +1,12 @@
+---
+
+classes:
+  inheritance::IChild:
+    attributes:
+      i:
+    methods:
+      IChild:
+      baseAndChild:
+      baseAndChildFinal:
+      getI:
+  inheritance::IFinal:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/IGChild.yml b/tests/cpp/sw-test/wrapcfg/ft/IGChild.yml
new file mode 100644
index 00000000..7291758f
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/IGChild.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  inheritance::IGrandChild:
+    methods:
+      baseAndGrandchild:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/IMChild.yml b/tests/cpp/sw-test/wrapcfg/ft/IMChild.yml
new file mode 100644
index 00000000..ab436e9e
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/IMChild.yml
@@ -0,0 +1,8 @@
+---
+
+classes:
+  inheritance::IMOther:
+  inheritance::IMChild:
+    methods:
+      baseAndChild:
+      baseAndChildFinal:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/Overloaded.yml b/tests/cpp/sw-test/wrapcfg/ft/Overloaded.yml
new file mode 100644
index 00000000..7bb1ae88
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/Overloaded.yml
@@ -0,0 +1,26 @@
+---
+
+functions:
+  OBinitOB:
+  OCinitOB:
+  OCinitOC:
+  OGinitOC:
+classes:
+  overloaded_inheritance::OBInitializer:
+    methods:
+      doInit:
+  overloaded_inheritance::OB:
+    methods:
+      Init:
+  overloaded_inheritance::OCInitializer:
+    methods:
+      doInit:
+  overloaded_inheritance::OC:
+    methods:
+      Init:
+        overloads:
+          OBInitializer&:
+          OCInitializer&:
+  overloaded_inheritance::OG:
+    methods:
+      Init:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/PBase.yml b/tests/cpp/sw-test/wrapcfg/ft/PBase.yml
new file mode 100644
index 00000000..b0cd4b37
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/PBase.yml
@@ -0,0 +1,10 @@
+---
+
+classes:
+  PBase:
+    methods:
+      getChannel:
+      privateFinalTestC:
+      privateFinalTestGC:
+      privateOverrideTestC:
+      setChannel:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/PChild.yml b/tests/cpp/sw-test/wrapcfg/ft/PChild.yml
new file mode 100644
index 00000000..7466e8d3
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/PChild.yml
@@ -0,0 +1,7 @@
+---
+
+classes:
+  PChild:
+    methods:
+      privateFinalTestGC:
+      PChild:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/PGChild.yml b/tests/cpp/sw-test/wrapcfg/ft/PGChild.yml
new file mode 100644
index 00000000..2eb166c8
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/PGChild.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  PGChild:
+    methods:
+      PGChild:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/abstract.yml b/tests/cpp/sw-test/wrapcfg/ft/abstract.yml
new file mode 100644
index 00000000..dc54802d
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/abstract.yml
@@ -0,0 +1,10 @@
+---
+
+classes:
+  Abstract:
+    methods:
+      mustOverrideMe:
+  PrivateAbstract:
+    methods:
+      PrivateAbstract:
+      getPrivateOverride:
diff --git a/tests/cpp/gen/ft/base_qualname.yml b/tests/cpp/sw-test/wrapcfg/ft/base_qualname.yml
similarity index 74%
rename from tests/cpp/gen/ft/base_qualname.yml
rename to tests/cpp/sw-test/wrapcfg/ft/base_qualname.yml
index fbda2543..48b5ce32 100644
--- a/tests/cpp/gen/ft/base_qualname.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/base_qualname.yml
@@ -1,11 +1,11 @@
 classes:
-  BaseQualname:
+  bq::detail::BaseQualname:
     base_qualnames:
       Hidden: bq::Hidden
     methods:
       BaseQualname:
   
-  THBaseQualname:
+  bq::detail::THBaseQualname:
     base_qualnames:
       THiddenBase1: bq::THiddenBase1<bq::THiddenBase2<int>>
       # parse with _parse_template_decl ... or split it out
@@ -13,13 +13,15 @@ classes:
     methods:
       BaseQualname:
 
-  TVisibleBase1:
+  bq::detail::TVisibleBase1:
     template_params:
     - T
-  TVisibleBase2:
+  bq::detail::TVisibleBase2:
     template_params:
     - T
 
+  bq::detail::TVBaseQualname:
+
 templates:
   TVisibleBase:
     qualname: bq::detail::TVisibleBase1
diff --git a/tests/cpp/gen/ft/base_qualname_hidden.yml b/tests/cpp/sw-test/wrapcfg/ft/base_qualname_hidden.yml
similarity index 74%
rename from tests/cpp/gen/ft/base_qualname_hidden.yml
rename to tests/cpp/sw-test/wrapcfg/ft/base_qualname_hidden.yml
index 3eecbdf0..c8a7f93d 100644
--- a/tests/cpp/gen/ft/base_qualname_hidden.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/base_qualname_hidden.yml
@@ -1,11 +1,13 @@
 classes:
-  THiddenBase1:
+  bq::THiddenBase1:
     template_params:
     - T
-  THiddenBase2:
+  bq::THiddenBase2:
     template_params:
     - T
 
+  bq::Hidden:
+
 templates:
   THiddenBase:
     qualname: bq::THiddenBase1
diff --git a/tests/cpp/sw-test/wrapcfg/ft/buffers.yml b/tests/cpp/sw-test/wrapcfg/ft/buffers.yml
new file mode 100644
index 00000000..cc4181f6
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/buffers.yml
@@ -0,0 +1,28 @@
+---
+
+classes:
+  Buffers:
+    methods:
+      set_buffer:
+        buffers:
+        - { type: IN, src: data, len: len }
+      get_buffer2:
+        buffers:
+        - { type: OUT, src: data, len: len }
+      get_buffer1:
+        buffers:
+        - { type: OUT, src: data, len: len }
+      inout_buffer:
+        buffers:
+        - { type: IN, src: indata, len: size }
+        - { type: OUT, src: outdata, len: size }
+
+      v_set_buffer:
+        buffers:
+        - { type: IN, src: data, len: len }
+      v_get_buffer2:
+        buffers:
+        - { type: OUT, src: data, len: len }
+      v_get_buffer1:
+        buffers:
+        - { type: OUT, src: data, len: len }
\ No newline at end of file
diff --git a/tests/cpp/sw-test/wrapcfg/ft/custom_type_caster.yml b/tests/cpp/sw-test/wrapcfg/ft/custom_type_caster.yml
new file mode 100644
index 00000000..72eb8d35
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/custom_type_caster.yml
@@ -0,0 +1,5 @@
+---
+
+functions:
+  convertRpyintToInt:
+  checkConvertRpyintToInt:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/defaults.yml b/tests/cpp/sw-test/wrapcfg/ft/defaults.yml
new file mode 100644
index 00000000..c31e8c48
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/defaults.yml
@@ -0,0 +1,13 @@
+---
+
+functions:
+  fnSimpleDefaultParam:
+  fnEmptyDefaultParam:
+classes:
+  NotImportant:
+  HasDefaults:
+    attributes:
+      kDefVal:
+    methods:
+      getVal:
+      objectDefault:
diff --git a/tests/cpp/gen/ft/docstrings.yml b/tests/cpp/sw-test/wrapcfg/ft/docstrings.yml
similarity index 100%
rename from tests/cpp/gen/ft/docstrings.yml
rename to tests/cpp/sw-test/wrapcfg/ft/docstrings.yml
diff --git a/tests/cpp/gen/ft/docstrings_append.yml b/tests/cpp/sw-test/wrapcfg/ft/docstrings_append.yml
similarity index 100%
rename from tests/cpp/gen/ft/docstrings_append.yml
rename to tests/cpp/sw-test/wrapcfg/ft/docstrings_append.yml
diff --git a/tests/cpp/gen/ft/enums.yml b/tests/cpp/sw-test/wrapcfg/ft/enums.yml
similarity index 53%
rename from tests/cpp/gen/ft/enums.yml
rename to tests/cpp/sw-test/wrapcfg/ft/enums.yml
index 445f15c2..8d4e7516 100644
--- a/tests/cpp/gen/ft/enums.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/enums.yml
@@ -7,7 +7,11 @@ enums:
     arithmetic: true
 
 classes:
+  EnumContainer:
   EnumContainer2:
     enums:
       InnerMathEnum:
         arithmetic: true
+  enum_container_ns::NSEnumContainer:
+  enum_container_ns::NSEnumContainer2:
+  enum_container_ns::NSEnumContainer2::InnerEnumContainer:
diff --git a/tests/cpp/gen/ft/factory.yml b/tests/cpp/sw-test/wrapcfg/ft/factory.yml
similarity index 100%
rename from tests/cpp/gen/ft/factory.yml
rename to tests/cpp/sw-test/wrapcfg/ft/factory.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/fields.yml b/tests/cpp/sw-test/wrapcfg/ft/fields.yml
new file mode 100644
index 00000000..84bcee5c
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/fields.yml
@@ -0,0 +1,27 @@
+---
+
+classes:
+  ClassWithFields:
+    attributes:
+      array_of_two:
+      actual_int:
+      ref_int:
+      const_field:
+      static_int:
+      static_const:
+      static_constexpr:
+      should_be_ignored:
+    methods:
+      ClassWithFields:
+      get_array_of_two:
+  StructWithBitfields:
+    attributes:
+      enabled:
+      autonomous:
+      test:
+      eStop:
+      fmsAttached:
+      dsAttached:
+      control_reserved:
+    methods:
+      StructWithBitfields:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/gilsafe_container.yml b/tests/cpp/sw-test/wrapcfg/ft/gilsafe_container.yml
new file mode 100644
index 00000000..26c8f906
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/gilsafe_container.yml
@@ -0,0 +1,7 @@
+---
+
+classes:
+  GilsafeContainer:
+    methods:
+      assign:
+      check:
diff --git a/tests/cpp/gen/ft/ignore.yml b/tests/cpp/sw-test/wrapcfg/ft/ignore.yml
similarity index 100%
rename from tests/cpp/gen/ft/ignore.yml
rename to tests/cpp/sw-test/wrapcfg/ft/ignore.yml
diff --git a/tests/cpp/gen/ft/ignored_by_default.yml b/tests/cpp/sw-test/wrapcfg/ft/ignored_by_default.yml
similarity index 76%
rename from tests/cpp/gen/ft/ignored_by_default.yml
rename to tests/cpp/sw-test/wrapcfg/ft/ignored_by_default.yml
index dc5a4243..38c035be 100644
--- a/tests/cpp/gen/ft/ignored_by_default.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/ignored_by_default.yml
@@ -9,6 +9,7 @@ functions:
   id_fnEnable:
     ignore: false
 classes:
+  # id_IgnoreClass doesn't need to be specified because of default-ignore
   id_EnabledClass:
     ignore: false
     enums:
diff --git a/tests/cpp/gen/ft/inline_code.yml b/tests/cpp/sw-test/wrapcfg/ft/inline_code.yml
similarity index 100%
rename from tests/cpp/gen/ft/inline_code.yml
rename to tests/cpp/sw-test/wrapcfg/ft/inline_code.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/keepalive.yml b/tests/cpp/sw-test/wrapcfg/ft/keepalive.yml
new file mode 100644
index 00000000..0bd95d34
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/keepalive.yml
@@ -0,0 +1,12 @@
+---
+
+classes:
+  PatientRef:
+    attributes:
+      dead:
+  Nurse:
+    attributes:
+      m_p:
+    methods:
+      Nurse:
+      patientDead:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/lifetime.yml b/tests/cpp/sw-test/wrapcfg/ft/lifetime.yml
new file mode 100644
index 00000000..59bdb9a7
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/lifetime.yml
@@ -0,0 +1,10 @@
+---
+
+classes:
+  LTWithVirtual:
+    methods:
+      get_bool:
+  LTTester:
+    methods:
+      set_val:
+      get_bool:
diff --git a/tests/cpp/gen/ft/mvi.yml b/tests/cpp/sw-test/wrapcfg/ft/mvi.yml
similarity index 83%
rename from tests/cpp/gen/ft/mvi.yml
rename to tests/cpp/sw-test/wrapcfg/ft/mvi.yml
index ea253273..ecb2f79f 100644
--- a/tests/cpp/gen/ft/mvi.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/mvi.yml
@@ -1,31 +1,31 @@
 classes:
-  MVB:
+  inheritance::MVB:
     attributes:
       b:
     methods:
       MVB:
       get_b_b:
-  MVC:
+  inheritance::MVC:
     attributes:
       c:
     methods:
       get_c_b:
       get_c_c:
-  MVD0:
+  inheritance::MVD0:
     attributes:
       d0:
     methods:
       get_d0_b:
       get_d0_c:
       get_d0_d0:
-  MVD1:
+  inheritance::MVD1:
     attributes:
       d1:
     methods:
       get_d1_b:
       get_d1_c:
       get_d1_d1:
-  MVE:
+  inheritance::MVE:
     attributes:
       e:
     methods:
@@ -34,7 +34,7 @@ classes:
       get_e_d0:
       get_e_d1:
       get_e_e:
-  MVF:
+  inheritance::MVF:
     force_multiple_inheritance: true
     attributes:
       f:
diff --git a/tests/cpp/gen/ft/nested.yml b/tests/cpp/sw-test/wrapcfg/ft/nested.yml
similarity index 100%
rename from tests/cpp/gen/ft/nested.yml
rename to tests/cpp/sw-test/wrapcfg/ft/nested.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/ns_class.yml b/tests/cpp/sw-test/wrapcfg/ft/ns_class.yml
new file mode 100644
index 00000000..6d8f94a8
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/ns_class.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  ::ns::NSClass:
+    methods:
+      getN:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/ns_hidden.yml b/tests/cpp/sw-test/wrapcfg/ft/ns_hidden.yml
new file mode 100644
index 00000000..b67e3385
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/ns_hidden.yml
@@ -0,0 +1,15 @@
+---
+
+enums:
+  E:
+classes:
+  o::O:
+    methods:
+      O:
+  n::h::C:
+    methods:
+      fn:
+  ::o::AnotherC:
+    methods:
+      AnotherC:
+      fn:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/operators.yml b/tests/cpp/sw-test/wrapcfg/ft/operators.yml
new file mode 100644
index 00000000..1ebb185d
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/operators.yml
@@ -0,0 +1,16 @@
+---
+
+classes:
+  HasOperator:
+    methods:
+      HasOperator:
+        overloads:
+          "":
+          int:
+      operator==:
+  HasOperatorNoDefault:
+    attributes:
+      x:
+    methods:
+      HasOperatorNoDefault:
+      operator==:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/overloads.yml b/tests/cpp/sw-test/wrapcfg/ft/overloads.yml
new file mode 100644
index 00000000..6e7bedf8
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/overloads.yml
@@ -0,0 +1,35 @@
+---
+
+functions:
+  fnOverload:
+    overloads:
+      int, int:
+      int:
+classes:
+  OverloadedObject:
+    methods:
+      overloaded:
+        overloads:
+          int:
+          const char*:
+          int, int:
+          int, int, int:
+            param_override:
+              a:
+                name: x
+              b:
+                name: y
+              c:
+                name: z
+          "":
+      overloaded_constexpr:
+        overloads:
+          int, int:
+          int, int, int:
+      overloaded_static:
+        overloads:
+          int:
+          const char*:
+      overloaded_private:
+        overloads:
+          int:
diff --git a/tests/cpp/gen/ft/parameters.yml b/tests/cpp/sw-test/wrapcfg/ft/parameters.yml
similarity index 95%
rename from tests/cpp/gen/ft/parameters.yml
rename to tests/cpp/sw-test/wrapcfg/ft/parameters.yml
index e6f40c6b..7d4b9c7a 100644
--- a/tests/cpp/gen/ft/parameters.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/parameters.yml
@@ -23,3 +23,6 @@ functions:
     param_override:
       p:
         no_default: True
+
+classes:
+  Param:
\ No newline at end of file
diff --git a/tests/cpp/gen/ft/refqual.yml b/tests/cpp/sw-test/wrapcfg/ft/refqual.yml
similarity index 100%
rename from tests/cpp/gen/ft/refqual.yml
rename to tests/cpp/sw-test/wrapcfg/ft/refqual.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/remote_class.yml b/tests/cpp/sw-test/wrapcfg/ft/remote_class.yml
new file mode 100644
index 00000000..72a4a2a7
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/remote_class.yml
@@ -0,0 +1,4 @@
+---
+
+classes:
+  RemoteClass:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/remote_trampoline.yml b/tests/cpp/sw-test/wrapcfg/ft/remote_trampoline.yml
new file mode 100644
index 00000000..c74ec9ad
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/remote_trampoline.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  RemoteTrampoline:
+    methods:
+      fn:
diff --git a/tests/cpp/gen/ft/rename.yml b/tests/cpp/sw-test/wrapcfg/ft/rename.yml
similarity index 100%
rename from tests/cpp/gen/ft/rename.yml
rename to tests/cpp/sw-test/wrapcfg/ft/rename.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/retval.yml b/tests/cpp/sw-test/wrapcfg/ft/retval.yml
new file mode 100644
index 00000000..6daee48d
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/retval.yml
@@ -0,0 +1,11 @@
+---
+
+classes:
+  RetvalClass:
+    enums:
+      Retval:
+    methods:
+      get:
+        overloads:
+          "":
+          int:
diff --git a/tests/cpp/gen/ft/static_only.yml b/tests/cpp/sw-test/wrapcfg/ft/static_only.yml
similarity index 100%
rename from tests/cpp/gen/ft/static_only.yml
rename to tests/cpp/sw-test/wrapcfg/ft/static_only.yml
diff --git a/tests/cpp/gen/ft/subpkg.yml b/tests/cpp/sw-test/wrapcfg/ft/subpkg.yml
similarity index 100%
rename from tests/cpp/gen/ft/subpkg.yml
rename to tests/cpp/sw-test/wrapcfg/ft/subpkg.yml
diff --git a/tests/cpp/gen/ft/tbase.yml b/tests/cpp/sw-test/wrapcfg/ft/tbase.yml
similarity index 100%
rename from tests/cpp/gen/ft/tbase.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tbase.yml
diff --git a/tests/cpp/gen/ft/tbasic.yml b/tests/cpp/sw-test/wrapcfg/ft/tbasic.yml
similarity index 84%
rename from tests/cpp/gen/ft/tbasic.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tbasic.yml
index 3c2c4b23..ebea2fce 100644
--- a/tests/cpp/gen/ft/tbasic.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/tbasic.yml
@@ -7,7 +7,7 @@ classes:
       t:
     template_inline_code: |
       cls_TBasic
-        .def("__repr__", [=](const TBasic<T> &self){
+        .def("__repr__", [this](const TBasic<T> &self){
           // checking to see if clsName is available
           return "<" + clsName + ">";
         });
diff --git a/tests/cpp/gen/ft/tconcrete.yml b/tests/cpp/sw-test/wrapcfg/ft/tconcrete.yml
similarity index 100%
rename from tests/cpp/gen/ft/tconcrete.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tconcrete.yml
diff --git a/tests/cpp/gen/ft/tcrtp.yml b/tests/cpp/sw-test/wrapcfg/ft/tcrtp.yml
similarity index 100%
rename from tests/cpp/gen/ft/tcrtp.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tcrtp.yml
diff --git a/tests/cpp/gen/ft/tcrtpfwd.yml b/tests/cpp/sw-test/wrapcfg/ft/tcrtpfwd.yml
similarity index 100%
rename from tests/cpp/gen/ft/tcrtpfwd.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tcrtpfwd.yml
diff --git a/tests/cpp/gen/ft/tdependent_base.yml b/tests/cpp/sw-test/wrapcfg/ft/tdependent_base.yml
similarity index 100%
rename from tests/cpp/gen/ft/tdependent_base.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tdependent_base.yml
diff --git a/tests/cpp/gen/ft/tdependent_param.yml b/tests/cpp/sw-test/wrapcfg/ft/tdependent_param.yml
similarity index 89%
rename from tests/cpp/gen/ft/tdependent_param.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tdependent_param.yml
index 218ccb66..63afe8b2 100644
--- a/tests/cpp/gen/ft/tdependent_param.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/tdependent_param.yml
@@ -1,6 +1,6 @@
 ---
 classes:
-  TDependentParam:
+  whatever::TDependentParam:
     template_params:
     - T
     typealias:
diff --git a/tests/cpp/gen/ft/tdependent_using.yml b/tests/cpp/sw-test/wrapcfg/ft/tdependent_using.yml
similarity index 86%
rename from tests/cpp/gen/ft/tdependent_using.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tdependent_using.yml
index 312221df..f6158f79 100644
--- a/tests/cpp/gen/ft/tdependent_using.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/tdependent_using.yml
@@ -1,6 +1,6 @@
 ---
 classes:
-  TDependentUsing:
+  whatever::TDependentUsing:
     template_params:
     - T
     typealias:
diff --git a/tests/cpp/gen/ft/tdependent_using2.yml b/tests/cpp/sw-test/wrapcfg/ft/tdependent_using2.yml
similarity index 82%
rename from tests/cpp/gen/ft/tdependent_using2.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tdependent_using2.yml
index 8955c57f..ceaa1f0f 100644
--- a/tests/cpp/gen/ft/tdependent_using2.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/tdependent_using2.yml
@@ -1,6 +1,6 @@
 ---
 classes:
-  TDependentUsing2:
+  whatever::TDependentUsing2:
     template_params:
     - T
   
diff --git a/tests/cpp/gen/ft/tfn.yml b/tests/cpp/sw-test/wrapcfg/ft/tfn.yml
similarity index 100%
rename from tests/cpp/gen/ft/tfn.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tfn.yml
diff --git a/tests/cpp/gen/ft/tnested.yml b/tests/cpp/sw-test/wrapcfg/ft/tnested.yml
similarity index 100%
rename from tests/cpp/gen/ft/tnested.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tnested.yml
diff --git a/tests/cpp/gen/ft/tnumeric.yml b/tests/cpp/sw-test/wrapcfg/ft/tnumeric.yml
similarity index 100%
rename from tests/cpp/gen/ft/tnumeric.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tnumeric.yml
diff --git a/tests/cpp/gen/ft/trampoline.yml b/tests/cpp/sw-test/wrapcfg/ft/trampoline.yml
similarity index 91%
rename from tests/cpp/gen/ft/trampoline.yml
rename to tests/cpp/sw-test/wrapcfg/ft/trampoline.yml
index 4b522110..4fd2af0f 100644
--- a/tests/cpp/gen/ft/trampoline.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/trampoline.yml
@@ -1,5 +1,6 @@
 ---
 classes:
+  MoveOnlyParam:
   ClassWithTrampoline:
     trampoline_inline_code: | 
       int get42() { return 42; }
@@ -17,8 +18,8 @@ classes:
           [&](py::function fn) -> int {
             return py::cast<int>(fn(param.i));
           }
-
-
+  ConstexprTrampoline:
+  ChildConstexprTrampoline:
 
 inline_code: |
   cls_ClassWithTrampoline
diff --git a/tests/cpp/gen/ft/tvbase.yml b/tests/cpp/sw-test/wrapcfg/ft/tvbase.yml
similarity index 100%
rename from tests/cpp/gen/ft/tvbase.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tvbase.yml
diff --git a/tests/cpp/gen/ft/tvchild.yml b/tests/cpp/sw-test/wrapcfg/ft/tvchild.yml
similarity index 100%
rename from tests/cpp/gen/ft/tvchild.yml
rename to tests/cpp/sw-test/wrapcfg/ft/tvchild.yml
diff --git a/tests/cpp/sw-test/wrapcfg/ft/type_caster.yml b/tests/cpp/sw-test/wrapcfg/ft/type_caster.yml
new file mode 100644
index 00000000..e3f1f8ea
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/type_caster.yml
@@ -0,0 +1,4 @@
+---
+
+functions:
+  get123:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/type_caster_nested.yml b/tests/cpp/sw-test/wrapcfg/ft/type_caster_nested.yml
new file mode 100644
index 00000000..031fdc64
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/type_caster_nested.yml
@@ -0,0 +1,6 @@
+---
+
+classes:
+  NestedTypecaster:
+    methods:
+      callWithList:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/using.yml b/tests/cpp/sw-test/wrapcfg/ft/using.yml
new file mode 100644
index 00000000..0a2b3179
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/using.yml
@@ -0,0 +1,9 @@
+
+typealias:
+- cr::AlsoCantResolve
+
+classes:
+  cr::inner::ProtectedUsing:
+    typealias:
+    - cr::CantResolve
+  u::FwdDecl:
\ No newline at end of file
diff --git a/tests/cpp/gen/ft/using2.yml b/tests/cpp/sw-test/wrapcfg/ft/using2.yml
similarity index 65%
rename from tests/cpp/gen/ft/using2.yml
rename to tests/cpp/sw-test/wrapcfg/ft/using2.yml
index 09891640..96e668ff 100644
--- a/tests/cpp/gen/ft/using2.yml
+++ b/tests/cpp/sw-test/wrapcfg/ft/using2.yml
@@ -3,12 +3,17 @@ extra_includes:
 - using.h
 
 classes:
-  fancy_list:
+  u::fancy_list:
     template_params:
     - T
-  Using3:
+  u::u2::Using1:
+  u::u2::Using2:
+  u::u2::Using3:
     typealias:
     - template <typename T> using fancy_list = u::fancy_list<T>
+  u::u2::Using4:
+  u::u2::Using5a:
+  u::u2::Using5b:
   
 templates:
   fancy_int_list:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/usingparent.yml b/tests/cpp/sw-test/wrapcfg/ft/usingparent.yml
new file mode 100644
index 00000000..7edf9a8a
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/usingparent.yml
@@ -0,0 +1,10 @@
+---
+
+classes:
+  UPBase:
+    methods:
+      get5:
+  UPChild:
+    methods:
+      get5:
+      get6:
diff --git a/tests/cpp/sw-test/wrapcfg/ft/virtual_comma.yml b/tests/cpp/sw-test/wrapcfg/ft/virtual_comma.yml
new file mode 100644
index 00000000..8fe13bdf
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/ft/virtual_comma.yml
@@ -0,0 +1,11 @@
+---
+
+classes:
+  VirtualComma:
+    methods:
+      getTwoTwo:
+      getRval:
+        overloads:
+          "":
+          int:
+  VirtualComma::RVal:
diff --git a/tests/cpp/gen/ft/virtual_xform.yml b/tests/cpp/sw-test/wrapcfg/ft/virtual_xform.yml
similarity index 100%
rename from tests/cpp/gen/ft/virtual_xform.yml
rename to tests/cpp/sw-test/wrapcfg/ft/virtual_xform.yml
diff --git a/tests/cpp/sw-test/wrapcfg/modules/.gitignore b/tests/cpp/sw-test/wrapcfg/modules/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/modules/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/cpp/sw-test/wrapcfg/trampolines/.gitignore b/tests/cpp/sw-test/wrapcfg/trampolines/.gitignore
new file mode 100644
index 00000000..d698f725
--- /dev/null
+++ b/tests/cpp/sw-test/wrapcfg/trampolines/.gitignore
@@ -0,0 +1 @@
+/meson.build
diff --git a/tests/requirements.txt b/tests/requirements.txt
index 05d02b92..39bb1361 100644
--- a/tests/requirements.txt
+++ b/tests/requirements.txt
@@ -1,3 +1,5 @@
 build
 pytest
-wheel
\ No newline at end of file
+hatchling
+hatch-meson
+ninja
\ No newline at end of file
diff --git a/tests/test_caster_module.py b/tests/test_caster_module.py
new file mode 100644
index 00000000..cf4429cf
--- /dev/null
+++ b/tests/test_caster_module.py
@@ -0,0 +1,10 @@
+from swtest_base._module import add_to_inty
+from sw_caster_consumer._module import add_more_to_inty
+
+
+def test_add_to_inty():
+    assert add_to_inty(1, 2) == 3
+
+
+def test_add_more_to_inty():
+    assert add_more_to_inty(1, 2) == 3
diff --git a/tests/test_downloaded.py b/tests/test_downloaded.py
deleted file mode 100644
index 8de83f3d..00000000
--- a/tests/test_downloaded.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# just ensures the module built correctly, nothing fancy
-import rpytest.dl
-
-
-def test_downloaded_fn():
-    assert rpytest.dl.downloaded_fn(3) == 0x42 + 3
-
-
-def test_extra_content():
-    assert rpytest.dl.extra_content() == True
diff --git a/tests/test_ft_defaults.py b/tests/test_ft_defaults.py
index 177570de..1fbeee1a 100644
--- a/tests/test_ft_defaults.py
+++ b/tests/test_ft_defaults.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_defaults():
diff --git a/tests/test_ft_docs.py b/tests/test_ft_docs.py
index 90a8ca52..669fa3e3 100644
--- a/tests/test_ft_docs.py
+++ b/tests/test_ft_docs.py
@@ -1,5 +1,5 @@
 import inspect
-from rpytest import ft
+from swtest import ft
 
 
 def test_docstrings_enum():
@@ -29,7 +29,7 @@ def test_docstrings_cls():
 def test_docstrings_meth():
     assert inspect.getdoc(ft.DocClass.fn) == inspect.cleandoc(
         """
-        fn(self: rpytest.ft._rpytest_ft.DocClass) -> None
+        fn(self: swtest.ft._ft.DocClass) -> None
         
         Function with docstring for good measure
         """
@@ -39,7 +39,7 @@ def test_docstrings_meth():
 def test_docstrings_meth_kwd():
     assert inspect.getdoc(ft.DocClass.fn2) == inspect.cleandoc(
         """
-        fn2(self: rpytest.ft._rpytest_ft.DocClass, from_: typing.SupportsInt) -> None
+        fn2(self: swtest.ft._ft.DocClass, from_: typing.SupportsInt) -> None
         
         Function with parameter that's a python keyword
 
@@ -51,7 +51,7 @@ def test_docstrings_meth_kwd():
 def test_docstrings_meth_rename():
     assert inspect.getdoc(ft.DocClass.fn3) == inspect.cleandoc(
         """
-        fn3(self: rpytest.ft._rpytest_ft.DocClass, ohai: typing.SupportsInt) -> None
+        fn3(self: swtest.ft._ft.DocClass, ohai: typing.SupportsInt) -> None
         
         Function with renamed parameter
 
@@ -104,7 +104,7 @@ def test_docstrings_append():
     )
     assert inspect.getdoc(ft.DocAppendClass.fn) == inspect.cleandoc(
         """
-        fn(self: rpytest.ft._rpytest_ft.DocAppendClass) -> None
+        fn(self: swtest.ft._ft.DocAppendClass) -> None
         
         Function with docstring for good measure
         Useful extra information about this fn
diff --git a/tests/test_ft_enums.py b/tests/test_ft_enums.py
index 663f89ac..849f14a2 100644
--- a/tests/test_ft_enums.py
+++ b/tests/test_ft_enums.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 
 
@@ -12,8 +12,8 @@ def test_enums():
 
     # Unnamed enums are hoisted as integers to their scope
     # - not supported yet for globals
-    # assert ft._rpytest_ft.UGEX == 7
-    # assert ft._rpytest_ft.NSUGEX == 5
+    # assert ft._ft.UGEX == 7
+    # assert ft._ft.NSUGEX == 5
     assert ft.EnumContainer.UEX == 4
 
     # enum class are specific types
diff --git a/tests/test_ft_fields.py b/tests/test_ft_fields.py
index 1ba0cfd8..6ed5cf44 100644
--- a/tests/test_ft_fields.py
+++ b/tests/test_ft_fields.py
@@ -1,4 +1,4 @@
-from rpytest.ft import ClassWithFields, StructWithBitfields
+from swtest.ft import ClassWithFields, StructWithBitfields
 
 
 def test_fields():
diff --git a/tests/test_ft_ignore.py b/tests/test_ft_ignore.py
index bdcaf271..f88f75f7 100644
--- a/tests/test_ft_ignore.py
+++ b/tests/test_ft_ignore.py
@@ -1,14 +1,14 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_ignore_fn():
-    assert not hasattr(ft._rpytest_ft, "fnIgnore")
+    assert not hasattr(ft._ft, "fnIgnore")
     assert ft.fnIgnoredParam() == 3
 
 
 def test_ignore_cls():
-    assert not hasattr(ft._rpytest_ft, "IgnoredClass")
-    assert not hasattr(ft._rpytest_ft, "IgnoredClassWithEnum")
+    assert not hasattr(ft._ft, "IgnoredClass")
+    assert not hasattr(ft._ft, "IgnoredClassWithEnum")
 
     c = ft.ClassWithIgnored()
 
@@ -25,7 +25,7 @@ def test_ignore_cls_enum():
 
 
 def test_ignored_enums():
-    assert not hasattr(ft._rpytest_ft, "IgnoredEnum")
+    assert not hasattr(ft._ft, "IgnoredEnum")
 
     assert not hasattr(ft.EnumWithIgnored, "Ignored")
     assert ft.EnumWithIgnored.NotIgnored == 1
@@ -37,19 +37,19 @@ def test_ignored_enums():
 
 
 def test_ignored_by_default_fn():
-    assert not hasattr(ft._rpytest_ft, "id_fnIgnore")
-    assert ft._rpytest_ft.id_fnEnable() == 2
+    assert not hasattr(ft._ft, "id_fnIgnore")
+    assert ft._ft.id_fnEnable() == 2
 
 
 def test_ignored_by_default_enum():
-    assert not hasattr(ft._rpytest_ft, "id_IgnoredEnum")
-    assert ft._rpytest_ft.id_EnabledEnum.Param3 == 3
+    assert not hasattr(ft._ft, "id_IgnoredEnum")
+    assert ft._ft.id_EnabledEnum.Param3 == 3
 
 
 def test_ignored_by_default_class():
-    assert not hasattr(ft._rpytest_ft, "id_IgnoreClass")
-    o = ft._rpytest_ft.id_EnabledClass()
+    assert not hasattr(ft._ft, "id_IgnoreClass")
+    o = ft._ft.id_EnabledClass()
     assert o.fn() == 3
     assert o.fn_missing() == 4
-    assert ft._rpytest_ft.id_EnabledClass.InnerEnum.Param6 == 6
-    assert ft._rpytest_ft.id_EnabledClass.InnerEnumMissing.Param7 == 7
+    assert ft._ft.id_EnabledClass.InnerEnum.Param6 == 6
+    assert ft._ft.id_EnabledClass.InnerEnumMissing.Param7 == 7
diff --git a/tests/test_ft_inheritance.py b/tests/test_ft_inheritance.py
index 37de6440..30a84cd5 100644
--- a/tests/test_ft_inheritance.py
+++ b/tests/test_ft_inheritance.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 
 import inspect
diff --git a/tests/test_ft_keepalive.py b/tests/test_ft_keepalive.py
index 95bd3146..004dd6fb 100644
--- a/tests/test_ft_keepalive.py
+++ b/tests/test_ft_keepalive.py
@@ -1,7 +1,7 @@
 import gc
 import sys
 
-from rpytest import ft
+from swtest import ft
 
 
 def test_ft_autokeepalive():
diff --git a/tests/test_ft_lifetime.py b/tests/test_ft_lifetime.py
index 84b3d4cc..97d86ff7 100644
--- a/tests/test_ft_lifetime.py
+++ b/tests/test_ft_lifetime.py
@@ -1,4 +1,4 @@
-from rpytest.ft import LTTester, LTWithVirtual
+from swtest.ft import LTTester, LTWithVirtual
 import gc
 
 
diff --git a/tests/test_ft_misc.py b/tests/test_ft_misc.py
index 77193eff..371233c5 100644
--- a/tests/test_ft_misc.py
+++ b/tests/test_ft_misc.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 import re
 
@@ -161,7 +161,7 @@ def test_cpp_code_with_constant():
 
 
 def test_ns_class():
-    assert ft._rpytest_ft.NSClass().getN() == 4
+    assert ft._ft.NSClass().getN() == 4
 
 
 #
@@ -296,4 +296,4 @@ def __init__(self):
 
 
 def test_subpkg():
-    from rpytest.ft.subpkg import SPClass
+    from swtest.ft.subpkg import SPClass
diff --git a/tests/test_ft_overloaded.py b/tests/test_ft_overloaded.py
index e36a2478..6874d872 100644
--- a/tests/test_ft_overloaded.py
+++ b/tests/test_ft_overloaded.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_overloaded():
diff --git a/tests/test_ft_overloads.py b/tests/test_ft_overloads.py
index a2326377..20c870da 100644
--- a/tests/test_ft_overloads.py
+++ b/tests/test_ft_overloads.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_fn_overloads():
diff --git a/tests/test_ft_parameters.py b/tests/test_ft_parameters.py
index 602edbf9..7943d3e7 100644
--- a/tests/test_ft_parameters.py
+++ b/tests/test_ft_parameters.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 
 
diff --git a/tests/test_ft_protection.py b/tests/test_ft_protection.py
index 95fcc426..4e933a21 100644
--- a/tests/test_ft_protection.py
+++ b/tests/test_ft_protection.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 
 
diff --git a/tests/test_ft_refquals.py b/tests/test_ft_refquals.py
index 8356435e..89624376 100644
--- a/tests/test_ft_refquals.py
+++ b/tests/test_ft_refquals.py
@@ -1,4 +1,4 @@
-import rpytest.ft._rpytest_ft as ft
+import swtest.ft._ft as ft
 
 
 def test_refquals_ref():
diff --git a/tests/test_ft_rename.py b/tests/test_ft_rename.py
index 250b6305..fda87420 100644
--- a/tests/test_ft_rename.py
+++ b/tests/test_ft_rename.py
@@ -1,14 +1,14 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_rename_fn():
-    assert not hasattr(ft._rpytest_ft, "fnOriginal")
+    assert not hasattr(ft._ft, "fnOriginal")
     assert ft.fnRenamed() == 0x1
     assert ft.fnRenamedParam(y=4) == 4
 
 
 def test_rename_cls():
-    assert not hasattr(ft._rpytest_ft, "OriginalClass")
+    assert not hasattr(ft._ft, "OriginalClass")
 
     c = ft.RenamedClass()
 
@@ -36,7 +36,7 @@ def test_rename_cls():
 
 
 def test_rename_enums():
-    assert not hasattr(ft._rpytest_ft, "OriginalEnum")
+    assert not hasattr(ft._ft, "OriginalEnum")
 
     assert not hasattr(ft.RenamedEnum, "Original1")
     assert ft.RenamedEnum.Renamed1 == 1
diff --git a/tests/test_ft_templates.py b/tests/test_ft_templates.py
index 9cc1af02..a00db1be 100644
--- a/tests/test_ft_templates.py
+++ b/tests/test_ft_templates.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_basic_template():
diff --git a/tests/test_ft_trampoline.py b/tests/test_ft_trampoline.py
index 35fe0f49..6c222324 100644
--- a/tests/test_ft_trampoline.py
+++ b/tests/test_ft_trampoline.py
@@ -1,4 +1,5 @@
-from rpytest.ft import ClassWithTrampoline, ConstexprTrampoline
+from swtest_base._module import abaseclass
+from swtest.ft._ft import ClassWithTrampoline, ConstexprTrampoline, RemoteTrampoline
 
 
 def test_class_with_trampoline():
@@ -29,3 +30,13 @@ def fnWithMoveOnlyParam(self, i):
 
 def test_constexpr_trampoline():
     ConstexprTrampoline()
+
+
+def test_remote_trampoline():
+    a = abaseclass()
+    assert a.fn() == "abaseclass"
+
+    r = RemoteTrampoline()
+    assert r.fn() == "RemoteTrampoline"
+
+    assert isinstance(r, abaseclass)
diff --git a/tests/test_ft_typecaster.py b/tests/test_ft_typecaster.py
index 6efd1b01..ed897778 100644
--- a/tests/test_ft_typecaster.py
+++ b/tests/test_ft_typecaster.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 import pytest
 
 #
diff --git a/tests/test_multiple_virtual_inheritance.py b/tests/test_multiple_virtual_inheritance.py
index 9c73c6c8..666a2665 100644
--- a/tests/test_multiple_virtual_inheritance.py
+++ b/tests/test_multiple_virtual_inheritance.py
@@ -1,4 +1,4 @@
-from rpytest import ft
+from swtest import ft
 
 
 def test_another_diamond_b():
diff --git a/tests/test_srconly.py b/tests/test_srconly.py
deleted file mode 100644
index 7a61e164..00000000
--- a/tests/test_srconly.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# just ensures the source-only module built correctly, nothing fancy
-import rpytest.srconly
-
-
-def test_srconly_fn():
-    assert rpytest.srconly.srconly_fn(3) == 3 - 0x42