Cache executor to avoid hitting open file limits (#4560)

Fixes #4504, fixes #3251
This commit is contained in:
Shantanu 2025-01-25 09:28:06 -08:00 committed by GitHub
parent c0b92f3888
commit 99dbf3006b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

View File

@ -2,7 +2,7 @@
import logging
from concurrent.futures import Executor, ProcessPoolExecutor
from datetime import datetime, timezone
from functools import cache, partial
from multiprocessing import freeze_support

try:
@ -85,12 +85,16 @@ def main(bind_host: str, bind_port: int) -> None:
    web.run_app(app, host=bind_host, port=bind_port, handle_signals=True, print=None)
@cache
def executor() -> Executor:
    """Return the shared process pool, creating it on first use.

    ``@cache`` memoizes the result, so every call after the first returns
    the very same ``ProcessPoolExecutor`` instance instead of spawning a
    fresh pool (and its worker-pipe file descriptors) per call.
    """
    pool: Executor = ProcessPoolExecutor()
    return pool
def make_app() -> web.Application:
    """Build the blackd aiohttp application.

    Installs the CORS middleware and registers the single POST route,
    binding the request handler to the process pool returned by
    ``executor()``.  Because ``executor()`` is cached, repeated calls to
    ``make_app`` share one pool rather than creating a new one each time
    (avoids exhausting open-file limits — see #4504 / #3251).

    Returns:
        The configured ``web.Application`` ready to be run.
    """
    app = web.Application(
        middlewares=[cors(allow_headers=(*BLACK_HEADERS, "Content-Type"))]
    )
    app.add_routes([web.post("/", partial(handle, executor=executor()))])
    return app