"""(re-)generate the litestar multi-output recipe based on `pyproject.toml`
Invoke this locally from the root of the feedstock, assuming `tomli`, `jinja2`, and `packaging`:
python test_recipe.py --update
git commit -m "updated recipe with test_recipe.py"
conda smithy rerender
If not updating, parameter will fail if new `[extra]`s are added, or
dependencies change.
This tries to work with the conda-forge autotick bot by reading updates from
`meta.yml`:
- build_number
- version
- sha256sum
If running locally against a non-bot-requested version, you'll probably need
to update those fields in `meta.yaml`.
If some underlying project data changed e.g. the `path_to-the_tarball`, update
`TEMPLATE` below and re-run.
"""
import os
import re
import sys
import tempfile
import tarfile
from pathlib import Path
from urllib.request import urlretrieve
import difflib
import jinja2
import tomli
from packaging.requirements import Requirement
TEMPLATE = """
# yaml-language-server: $schema=https://raw.githubusercontent.com/prefix-dev/recipe-format/main/schema.json
schema_version: 1
context:
version: "<< version >>"
python_check_max: "3.14"
recipe:
name: litestar-split
version: ${{ version }}
source:
url: https://pypi.org/packages/source/l/litestar/litestar-${{ version }}.tar.gz
# the SHA256 gets updated by the bot
sha256: << sha256_sum >>
build:
# the build number gets reset by the bot
number: << build_number >>
noarch: python
outputs:
- package:
name: litestar
build:
noarch: python
script:
- ${{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation --disable-pip-version-check
python:
entry_points:
- litestar = litestar.__main__:run_cli
requirements:
host:
- hatchling
- pip
- python ${{ python_min }}.*
run:
- annotated-types
- python >=${{ python_min }}<% for dep in core_deps %>
- << dep >>
<%- endfor %>
tests:
- python:
imports: litestar
pip_check: true
python_version:
- ${{ python_min }}.*
- ${{ python_check_max }}.*
<% for extra, extra_deps in extra_outputs.items() %>
- package:
name: litestar-with-<< extra >>
build:
noarch: generic
requirements:
run:
- ${{ pin_subpackage("litestar", exact=True) }}<% for dep in extra_deps %>
- << dep >>
<%- endfor %>
tests:
- python:
imports: << extra_test_imports[extra] if extra in extra_test_imports else "litestar" >>
pip_check: true
python_version:
- ${{ python_min }}.*
- << '"' + py_max[extra] + '.*"' if extra in py_max else '${{ python_check_max }}.*' >>
<%- if extra in extra_test_commands %>
- requirements:
run:
- python ${{ python_min }}.*
script:
- << extra_test_commands[extra] >><%- endif %>
about:
summary: Light-weight and flexible ASGI API Framework (with [<< extra >>])
<% endfor %>
- package:
name: litestar-with-full
build:
noarch: generic
requirements:
run:
- ${{ pin_subpackage("litestar", exact=True) }}
<%- for extra, extra_deps in extra_outputs.items() %>
- ${{ pin_subpackage("litestar-with-<< extra >>", exact=True) }}
<%- endfor %>
tests:
- python:
imports: litestar
pip_check: true
python_version:
- ${{ python_min }}.*
- << '${{ python_check_max }}.*' if not py_max else '"' + (py_max.values() | min) + '.*"' >>
about:
summary: Light-weight and flexible ASGI API Framework (with all [extras])
about:
summary: Light-weight and flexible ASGI API Framework
license: MIT
license_file: LICENSE
homepage: https://litestar.dev
repository: https://github.com/litestar-org/litestar
documentation: https://docs.litestar.dev
extra:
feedstock-name: litestar
recipe-maintainers:
- bollwyvl
- thewchan
"""
DELIMIT = dict(
    # use alternate template delimiters to avoid conflicts
    block_start_string="<%",
    block_end_string="%>",
    variable_start_string="<<",
    variable_end_string=">>",
)
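# e.g. rendering with these delimiters substitutes this script's values for
# `<< version >>` and expands `<% for %>` blocks, while rattler-build's own
# `${{ python_min }}`-style expressions pass through to recipe.yaml untouched:
#
#   jinja2.Template('version: "<< version >>"', **DELIMIT).render(version="9.9.9")
#   # -> 'version: "9.9.9"'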
DEV_URL = "https://github.com/litestar-org/litestar"
#: assume running locally
HERE = Path(__file__).parent
WORK_DIR = HERE / "recipe"
SRC_DIR = Path(os.environ["SRC_DIR"]) if "SRC_DIR" in os.environ else None
#: assume inside conda-build
if "RECIPE_DIR" in os.environ:
    WORK_DIR = Path(os.environ["RECIPE_DIR"])
RECIPE = WORK_DIR / "recipe.yaml"
CURRENT_RECIPE_TEXT = RECIPE.read_text(encoding="utf-8")
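#: the parsing below only relies on the rendered recipe containing indented
#: lines shaped like these (values are placeholders):
#:
#:     version: "9.9.9"
#:     sha256: 0123abcd...
#:     number: 0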
#: read the version from what the bot might have updated
try:
    VERSION = re.findall(r' version: "([^"]*)"', CURRENT_RECIPE_TEXT)[0].strip()
    SHA256_SUM = re.findall(r" sha256: ([\S]*)", CURRENT_RECIPE_TEXT)[0].strip()
    BUILD_NUMBER = re.findall(r" number: ([\S]*)", CURRENT_RECIPE_TEXT)[0].strip()
except Exception as err:
    print(CURRENT_RECIPE_TEXT)
    print(f"!!! failed to find version info in above {RECIPE}")
    print(err)
    sys.exit(1)
#: instead of cloning the whole repo, just download tarball
TARBALL_URL = f"{DEV_URL}/archive/refs/tags/v{VERSION}.tar.gz"
#: the path to `pyproject.toml` in the tarball
#: at present, this is the only place where the name change has an impact,
#: but will soon be pervasive on the 2.0.x line
PYPROJECT_TOML = f"litestar-{VERSION}/pyproject.toml"
#: despite claiming optional, these end up as hard `Requires-Dist`
KNOWN_REQS = [
    "mako",
]
#: these are handled externally
KNOWN_SKIP = [
    "python",
]
#: handle lack of conda support for [extras]
TRANSFORM_EXTRA_DEP = {
    ("uvicorn", ("standard",)): "uvicorn-standard",
    ("redis-py", ("hiredis",)): "redis-py",
}
#: handle transient extras incurred, keyed by post-transform names
EXTRA_EXTRA_DEPS = {
    # https://github.com/redis/redis-py/blob/v4.5.3/setup.py#L57
    "redis-py": ["hiredis >=1.0.0"],
    "uvicorn": ["uvicorn-standard"],
}
#: a meaningful import that isn't caught
EXTRA_TEST_IMPORTS = {
    "attrs": "litestar.contrib.attrs",
    "cli": "litestar.cli.main",
    "cryptography": "litestar.middleware.session.client_side",
    "htmx": "litestar.plugins.htmx",
    "jinja": "litestar.contrib.jinja",
    "jwt": "litestar.contrib.jwt.jwt_token",
    "mako": "litestar.contrib.mako",
    "minijinja": "litestar.contrib.minijinja",
    "opentelemetry": "litestar.contrib.opentelemetry",
    "piccolo": "litestar.contrib.piccolo",
    "picologging": "litestar.logging.picologging",
    "pydantic": "litestar.contrib.pydantic",
    "prometheus": "litestar.contrib.prometheus",
    "redis": "litestar.stores.redis",
    "sqlalchemy": "litestar.plugins.sqlalchemy",
    "structlog": "litestar.middleware.logging",
}
#: commands to run after `pip check`
EXTRA_TEST_COMMANDS = {
    "cli": "litestar --help",
}
#: some extras may become temporarily broken: add them here to skip
SKIP_EXTRAS = [
    # re-built manually
    "full",
    # several levels of missing deps
    "piccolo",
    "sqlalchemy",
    # many missing deps, very tight pins on security packages
    "valkey",
]
REPLACE_DEPS = {
    "importlib-resources": "importlib_resources",
    "prometheus-client": "prometheus_client",
    "redis": "redis-py",
    "typing-extensions": "typing_extensions",
}
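#: assumed lowest supported python, used only to evaluate environment markers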
PY_MIN = "3.10"
PY_MAX = {
    "picologging": "3.12",
}

def reqtify(raw: str):
    """Convert a PEP 508 dependency into a (possibly empty) list of conda requirements"""
    dep = Requirement(raw)
    name = f"{dep.name}"
    spec = f"{dep.specifier}"
    if dep.marker and not dep.marker.evaluate({"python_version": PY_MIN}):
        print(f"... skipping '{raw}' because 'python_version >={PY_MIN}'")
        return []
    has_tilde = "~" in spec
    has_caret = "^=" in spec
    if has_tilde or has_caret:
        strip_bits = 2 if has_tilde else 3
        bits = spec[strip_bits:].split(".")
        min_ = ".".join(bits)
        max_base, max_last = bits[:-strip_bits], str(int(bits[-strip_bits]) + 1)
        max_ = ".".join([*max_base, max_last])
        spec = f">={min_},<{max_}"
    if "," in spec:
        spec = ",".join(reversed(sorted(spec.split(","))))
    name = REPLACE_DEPS.get(name, name)
    final = f"{name} {spec}".lower()
    if final.replace(" ", "") != raw.replace(" ", ""):
        print("... normalizing\n\t", raw, "\n\t", final)
    return [final.strip()]
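# a rough sketch of the normalization above (inputs are hypothetical):
#
#   reqtify("Foo ~=1.2.3")                     -> ["foo >=1.2.3,<1.3"]
#   reqtify("typing-extensions")               -> ["typing_extensions"]
#   reqtify('bar ; python_version < "3.9"')    -> []  (marker fails for 3.10)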

def preflight_recipe():
    """check the recipe first"""
    print("version:", VERSION)
    print("sha256: ", SHA256_SUM)
    print("number: ", BUILD_NUMBER)
    assert VERSION, "no recipe.yaml#/context/version detected"
    assert SHA256_SUM, "no recipe.yaml#/source/sha256 detected"
    assert BUILD_NUMBER, "no recipe.yaml#/build/number detected"
    print("information from the recipe looks good!", flush=True)

def get_pyproject_data():
    """fetch the pyproject.toml data"""
    if SRC_DIR:
        print(f"reading pyproject.toml from {SRC_DIR}...")
        return tomli.loads((SRC_DIR / "pyproject.toml").read_text(encoding="utf-8"))
    print(f"reading pyproject.toml from {TARBALL_URL}...")
    with tempfile.TemporaryDirectory() as td:
        tdp = Path(td)
        tarpath = tdp / Path(TARBALL_URL).name
        urlretrieve(TARBALL_URL, tarpath)
        with tarfile.open(tarpath, "r:gz") as tf:
            return tomli.load(tf.extractfile(PYPROJECT_TOML))

def verify_recipe(update=False):
    """check or update a recipe based on the `pyproject.toml` data"""
    check = not update
    preflight_recipe()
    pyproject = get_pyproject_data()
    deps = pyproject["project"]["dependencies"]
    core_deps = sorted(sum([reqtify(d_spec) for d_spec in deps], []))
    extras = pyproject["project"]["optional-dependencies"]
    extra_outputs = {
        extra: sorted(sum([reqtify(d_spec) for d_spec in extra_deps], []))
        for extra, extra_deps in extras.items()
        if extra not in SKIP_EXTRAS
    }
    extra_outputs = {
        extra: sorted(
            sum([EXTRA_EXTRA_DEPS.get(dep.split(" ")[0], []) for dep in deps], deps)
        )
        for extra, deps in extra_outputs.items()
    }
    context = dict(
        version=VERSION,
        build_number=BUILD_NUMBER,
        sha256_sum=SHA256_SUM,
        extra_outputs=extra_outputs,
        core_deps=core_deps,
        extra_test_imports=EXTRA_TEST_IMPORTS,
        extra_test_commands=EXTRA_TEST_COMMANDS,
        py_max=PY_MAX,
    )
    old_text = RECIPE.read_text(encoding="utf-8")
    template = jinja2.Template(TEMPLATE, **DELIMIT)
    new_text = template.render(**context).strip() + "\n"
    if check:
        if new_text.strip() != old_text.strip():
            print(f"{RECIPE} is not up-to-date:")
            print(
                "\n".join(
                    difflib.unified_diff(
                        old_text.splitlines(),
                        new_text.splitlines(),
                        RECIPE.name,
                        f"{RECIPE.name} (updated)",
                    )
                )
            )
            print("either apply the above patch, or run locally:")
            print("\n\tpython test_recipe.py --update\n")
            return 1
    else:
        RECIPE.write_text(new_text, encoding="utf-8")
    return 0

if __name__ == "__main__":
    sys.exit(verify_recipe(update="--update" in sys.argv))