Question

disposal_event = (
    select(
        EventDB.animal_uuid,
        EventDB.event_date,
        func.row_number().over(
            partition_by=EventDB.animal_uuid,
            order_by=EventDB.event_date.desc(),
        ).label('row_number')
    )
    .where(EventDB.event_type == EventTypeEnum.disposal_event, ~EventDB.is_edit, ~EventDB.is_delete)
    .subquery()
)
weight_log = (
    select(
        WeightLogDB.animal_uuid,
        WeightLogDB.live_weight,
        WeightLogDB.event_date,
        func.row_number().over(
            partition_by=WeightLogDB.animal_uuid,
            order_by=WeightLogDB.live_weight.desc()
        ).label('row_number')
    )
    .subquery()
)
query = (
    select(SheepDB)
    .add_columns(
        disposal_event.c.event_date.label('disposal_date'),
        weight_log.c.event_date.label('weight_max_date'),
        weight_log.c.live_weight.label('weight_max'),
        func.extract('year', func.age(weight_log.c.event_date, SheepDB.birth_day)).label('year_max_weight'),
        func.extract('month', func.age(weight_log.c.event_date, SheepDB.birth_day)).label('month_max_weight'),
    )
    .options(
        selectinload(SheepDB.birth_country),
        selectinload(SheepDB.birth_farm),
        selectinload(SheepDB.sex),
        selectinload(SheepDB.birth_type),
        selectinload(SheepDB.insemination_method),
    )
    .where(SheepDB.location_uuid.is_not(None))
    .outerjoin(disposal_event, and_(disposal_event.c.animal_uuid == SheepDB.uuid, disposal_event.c.row_number == 1))
    .outerjoin(weight_log, and_(weight_log.c.animal_uuid == SheepDB.uuid, weight_log.c.row_number == 1))
)
if search:
    query = query.filter(
        or_(
            SheepDB.number_unsm.ilike(f'%{search}%'),
            SheepDB.number_harriot.ilike(f'%{search}%')
        )
    )
return query

@animal_router.get(
    animal_url.animal_list_all_locations,
    response_model=PaginatePage[AncestorForAddEventListSchema],
    responses=responses(
        PaginatePage[AncestorForAddEventListSchema],
        statuses=[status.HTTP_400_BAD_REQUEST, status.HTTP_404_NOT_FOUND],
    ),
    description='Animals list all locations',
)
async def animal_all_locations_list(
    search: Annotated[str | None, Query(description='Search field by number_unsm, number_harriot')] = None,
    _: UserDB = Depends(PermissionManager(ANIMAL_DB, ActionEnum.get_list)),
    session: AsyncSession = Depends(get_async_session),
) -> PaginatePage[AncestorForAddEventListSchema]:
    result = await AnimalSession(session).get_animal_all_locations(search)
    return await paginate(session, result)

class AncestorForAddEventListSchema(BaseSchema):
    """Ancestor for add event list schema."""

    uuid: UUID
    birth_country: UUIDNameSchema | None
    birth_farm: UUIDNameSchema | None
    sex: UUIDNameSchema
    number_unsm: str | None
    number_inv: str | None
    number_electronic_tag: str | None
    number_harriot: str | None
    number_foreign: str | None
    birth_day: date | None
    birth_type: UUIDNameSchema | None
    insemination_method: UUIDNameSchema | None
    disposal_date: date | None
    weight_max: float | None
    weight_max_date: date | None
    month_max_weight: int | None

{
"type": "ValidationError",
"message": "120 validation errors for Page[TypeVar]Customized[AncestorForAddEventListSchema]\nitems.0.uuid\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.birth_country\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.birth_farm\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.sex\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.number_unsm\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.number_inv\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.number_electronic_tag\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.number_harriot\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.number_foreign\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.birth_day\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.birth_type\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.0.insemination_method\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.1.uuid\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.1.birth_country\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.1.birth_farm\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://errors.pydantic.dev/2.8/v/missing\nitems.1.sex\n Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]\n For further information visit https://err
"traceback": [
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/starlette/_exception_handler.py",
"line": 53,
"function": "wrapped_app",
"code": "await app(scope, receive, sender)"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/starlette/routing.py",
"line": 72,
"function": "app",
"code": "response = await func(request)"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi/routing.py",
"line": 278,
"function": "app",
"code": "raw_response = await run_endpoint_function("
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi/routing.py",
"line": 191,
"function": "run_endpoint_function",
"code": "return await dependant.call(**values)"
},
{
"file": "/Users/antongromov/projects/back/src/animal/routers.py",
"line": 212,
"function": "animal_all_locations_list",
"code": "return await paginate(session, result)"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/sqlalchemy/util/_concurrency_py3k.py",
"line": 203,
"function": "greenlet_spawn",
"code": "result = context.switch(value)"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi_pagination/ext/sqlalchemy.py",
"line": 186,
"function": "exec_pagination",
"code": "return create_page("
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi_pagination/api.py",
"line": 182,
"function": "create_page",
"code": "return _page_val.get().create(items, **kwargs)"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi_pagination/default.py",
"line": 73,
"function": "create",
"code": "return create_pydantic_model("
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/fastapi_pagination/utils.py",
"line": 171,
"function": "create_pydantic_model",
"code": "return model_cls.model_validate(kwargs, from_attributes=True) # type: ignore"
},
{
"file": "/Users/antongromov/projects/back/venv/lib/python3.12/site-packages/pydantic/main.py",
"line": 568,
"function": "model_validate",
"code": "return cls.pydantic_validator.validate_python("
}
]
}

Why am I getting this error? What am I doing wrong?

Thinking

Explaining the error and solutions

The error is a mismatch between the Pydantic schema and the query output, which is of type Row instead of the expected model.

The cause: the ext/sqlalchemy paginate path feeds raw query results straight into the schema, and .add_columns turns each result into a Row, making it incompatible.

Solution:

  1. Remove .add_columns and use select(SheepDB).
  2. Or create a custom query without ext/sqlalchemy, manually converting to schema.
  3. Alternatively, pass a transformer to shape the Row into the expected model.

Sample code can be provided for options 2 or 3.

Providing custom code snippet

Given the user's possible preference for customizing the code, approach 2 seems like an easy fit, so a sample is worth drafting.

After adjusting the import path and ensuring the right model alias (PaginatePage for Page), the response model works with this custom approach. The user should be comfortable adjusting imports, and using Page (from fastapi_pagination) will integrate well with the route's response_model.

Clarifying pagination approach

It seems the user has likely customized PaginatePage alias or set it up via add_pagination(app). However, the default pagination type is Page, and both generic pagination and ext/sqlalchemy use the same underlying logic. It's important to mention that while generic pagination loads all results and handles pagination in Python, ext/sqlalchemy applies LIMIT and OFFSET, which might be more efficient for large datasets. Avoiding overcomplication, I'll prepare a concise explanation and provide two potential fixes.

Thought for 2m 38s

Answer

In short: paginate returns rows (Row) while Pydantic expects AncestorForAddEventListSchema, so it cannot find the fields (uuid, birth_country, and so on) and fails with Field required.


What exactly is happening

Look at the query:

python
query = (
    select(SheepDB)
    .add_columns(
        disposal_event.c.event_date.label('disposal_date'),
        weight_log.c.event_date.label('weight_max_date'),
        weight_log.c.live_weight.label('weight_max'),
        func.extract('year', func.age(weight_log.c.event_date, SheepDB.birth_day)).label('year_max_weight'),
        func.extract('month', func.age(weight_log.c.event_date, SheepDB.birth_day)).label('month_max_weight'),
    )
    ...
)

A select(SheepDB).add_columns(...) like this returns not a model object but a SQLAlchemy row:

python
(<SheepDB instance>, disposal_date, weight_max_date, weight_max, year_max_weight, month_max_weight)

You can see this right in the error:

text
items.0.uuid Field required [type=missing, input_value=(<src.animal.models.Sheep... None, None, None, None), input_type=Row]

input_value here is a Row, not a SheepDB instance and not an AncestorForAddEventListSchema.

Next:

python
response_model=PaginatePage[AncestorForAddEventListSchema]
...
return await paginate(session, result)

fastapi-pagination's SQLAlchemy extension:

  1. runs the query,
  2. gets a list of Row objects,
  3. passes them to Pydantic as items: list[AncestorForAddEventListSchema].

Pydantic tries to build an AncestorForAddEventListSchema out of a single Row object.
From Pydantic's point of view, this Row:

  • has no .uuid attribute
  • has no .birth_country, .birth_farm, and so on

It only has positional elements (row[0], row[1], …) and the mapping row._mapping. That is why it reports Field required / missing.
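
For illustration, here is a minimal sketch of what such a row looks like if you execute the query yourself (hypothetical snippet, assuming the query built above and an AsyncSession named session, run inside an async function):

python
# minimal sketch: inspecting one result row of select(SheepDB).add_columns(...)
result = await session.execute(query)
row = result.first()

sheep = row[0]               # the SheepDB instance sits at position 0
print(row._mapping.keys())   # e.g. ['SheepDB', 'disposal_date', 'weight_max_date', ...]
print(hasattr(row, 'uuid'))  # False -- exactly the attribute Pydantic is looking for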


One more point

You also select the column:

python
func.extract('year', func.age(...)).label('year_max_weight')

but AncestorForAddEventListSchema has no year_max_weight field. That by itself does not break validation (Pydantic simply ignores the extra data), but logically it is odd: either the field belongs in the schema, or the column should be dropped from the select.
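
If you do want to expose it, the schema change is a one-liner (sketch; the field name must match the label in the select):

python
class AncestorForAddEventListSchema(BaseSchema):
    ...
    month_max_weight: int | None
    year_max_weight: int | None  # add only if the year_max_weight column stays in the select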


How to fix it

There are several options; here are the two most practical ones.


Option 1. Use the transformer argument of paginate (recommended)

We keep the SQL query as it is, but tell fastapi-pagination how to turn a Row into an AncestorForAddEventListSchema.

Example (the general idea; adjust it to your import names):

python
from fastapi_pagination.ext.sqlalchemy import paginate as sqlalchemy_paginate


@animal_router.get(
    animal_url.animal_list_all_locations,
    response_model=PaginatePage[AncestorForAddEventListSchema],
    ...
)
async def animal_all_locations_list(
    search: Annotated[str | None, Query(description='Search field by number_unsm, number_harriot')] = None,
    _: UserDB = Depends(PermissionManager(ANIMAL_DB, ActionEnum.get_list)),
    session: AsyncSession = Depends(get_async_session),
) -> PaginatePage[AncestorForAddEventListSchema]:
    query = await AnimalSession(session).get_animal_all_locations(search)

    def transform(items):
        # each row: (sheep, disposal_date, weight_max_date, weight_max, year_max_weight, month_max_weight)
        result: list[AncestorForAddEventListSchema] = []
        for sheep, disposal_date, weight_max_date, weight_max, year_max_weight, month_max_weight in items:
            result.append(
                AncestorForAddEventListSchema(
                    uuid=sheep.uuid,
                    birth_country=sheep.birth_country,
                    birth_farm=sheep.birth_farm,
                    sex=sheep.sex,
                    number_unsm=sheep.number_unsm,
                    number_inv=sheep.number_inv,
                    number_electronic_tag=sheep.number_electronic_tag,
                    number_harriot=sheep.number_harriot,
                    number_foreign=sheep.number_foreign,
                    birth_day=sheep.birth_day,
                    birth_type=sheep.birth_type,
                    insemination_method=sheep.insemination_method,
                    disposal_date=disposal_date,
                    weight_max=weight_max,
                    weight_max_date=weight_max_date,
                    month_max_weight=month_max_weight,
                    # if needed, add a year_max_weight field to the schema and pass it here too
                )
            )
        return result

    # transformer is exactly the hook for this kind of conversion
    return await sqlalchemy_paginate(session, query, transformer=transform)

The idea:

  • paginate first runs the SELECT with LIMIT/OFFSET in the database,
  • gets items as a list of Row objects,
  • applies transform(items) and hands Pydantic a list of AncestorForAddEventListSchema, so validation passes.

Option 2. Skip the SQLAlchemy extension and paginate an already materialized list

If there is not much data and pulling everything into memory is acceptable, you can avoid fastapi_pagination.ext.sqlalchemy altogether and use the plain fastapi_pagination.paginate:

python
from fastapi_pagination import paginate as paginate_list


@animal_router.get(
    animal_url.animal_list_all_locations,
    response_model=PaginatePage[AncestorForAddEventListSchema],
    ...
)
async def animal_all_locations_list(...):
    query = await AnimalSession(session).get_animal_all_locations(search)
    result = await session.execute(query)
    rows = result.all()

    items = [
        AncestorForAddEventListSchema(
            uuid=sheep.uuid,
            birth_country=sheep.birth_country,
            birth_farm=sheep.birth_farm,
            sex=sheep.sex,
            number_unsm=sheep.number_unsm,
            number_inv=sheep.number_inv,
            number_electronic_tag=sheep.number_electronic_tag,
            number_harriot=sheep.number_harriot,
            number_foreign=sheep.number_foreign,
            birth_day=sheep.birth_day,
            birth_type=sheep.birth_type,
            insemination_method=sheep.insemination_method,
            disposal_date=disposal_date,
            weight_max=weight_max,
            weight_max_date=weight_max_date,
            month_max_weight=month_max_weight,
        )
        for sheep, disposal_date, weight_max_date, weight_max, year_max_weight, month_max_weight in rows
    ]
    return paginate_list(items)

Here paginate_list receives a plain list of already built Pydantic models, so validation is fine.

The downside: limits and offsets are applied in memory rather than in the database, which hurts performance on large tables, so option 1 is usually the better choice.


Summary

The error is not in the schema itself and not in selectinload; the problem is that:

  • paginate(session, query) plus select(SheepDB).add_columns(...) ⇒ the result elements are Row objects,
  • but response_model=PaginatePage[AncestorForAddEventListSchema] ⇒ Pydantic expects each element to be an AncestorForAddEventListSchema, and a Row does not fit that.

You need to either:

  1. Explicitly convert Row to AncestorForAddEventListSchema (via transformer, or manually before calling paginate),
  2. Or stop adding extra columns to the select and instead expose them as properties/column_property on the model, mapping everything straight from SheepDB (see the sketch below).

If you want, I can help rewrite the SheepDB model so that disposal_date and weight_max become computed fields at the ORM level; the query then becomes a plain select(SheepDB).
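
Roughly, the column_property direction could look like this. This is only a sketch under assumptions: Base, EventDB, WeightLogDB and EventTypeEnum are the project's existing classes, the real SheepDB has many more columns, and the subqueries pick the latest disposal date and the maximum weight directly instead of reusing the row_number windows from the original query.

python
from uuid import UUID

from sqlalchemy import func, select
from sqlalchemy.orm import Mapped, column_property, mapped_column


class SheepDB(Base):
    __tablename__ = 'sheep'

    uuid: Mapped[UUID] = mapped_column(primary_key=True)
    # ... all other existing columns and relationships stay unchanged ...

    # date of the latest disposal event, via a correlated scalar subquery
    disposal_date = column_property(
        select(EventDB.event_date)
        .where(
            EventDB.animal_uuid == uuid,
            EventDB.event_type == EventTypeEnum.disposal_event,
            ~EventDB.is_edit,
            ~EventDB.is_delete,
        )
        .order_by(EventDB.event_date.desc())
        .limit(1)
        .correlate_except(EventDB)
        .scalar_subquery()
    )

    # maximum recorded live weight
    weight_max = column_property(
        select(func.max(WeightLogDB.live_weight))
        .where(WeightLogDB.animal_uuid == uuid)
        .correlate_except(WeightLogDB)
        .scalar_subquery()
    )

With attributes like these, select(SheepDB) alone is enough for paginate, every item is a SheepDB instance, and Pydantic reads disposal_date and weight_max via from_attributes; weight_max_date and month_max_weight would need similar treatment, or can stay with the transformer approach.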
