- 新增图像生成接口,支持试用、积分和自定义API Key模式 - 实现生成图片结果异步上传至MinIO存储,带重试机制 - 优化积分预扣除和异常退还逻辑,保障用户积分准确 - 添加获取生成历史记录接口,支持时间范围和分页 - 提供本地字典配置接口,支持模型、比例、提示模板和尺寸 - 实现图片批量上传接口,支持S3兼容对象存储 feat(admin): 增加管理员角色管理与权限分配接口 - 实现角色列表查询、角色创建、更新及删除功能 - 增加权限列表查询接口 - 实现用户角色分配接口,便于统一管理用户权限 - 增加系统字典增删查改接口,支持分类过滤和排序 - 权限控制全面覆盖管理接口,保证安全访问 feat(auth): 完善用户登录注册及权限相关接口与页面 - 实现手机号验证码发送及校验功能,保障注册安全 - 支持手机号注册、登录及退出接口,集成日志记录 - 增加修改密码功能,验证原密码后更新 - 提供动态导航菜单接口,基于权限展示不同菜单 - 实现管理界面路由及日志、角色、字典管理页面访问权限控制 - 添加系统日志查询接口,支持关键词和等级筛选 feat(app): 初始化Flask应用并配置蓝图与数据库 - 创建应用程序工厂,加载配置,初始化数据库和Redis客户端 - 注册认证、API及管理员蓝图,整合路由 - 根路由渲染主页模板 - 应用上下文中自动创建数据库表,保证运行环境准备完毕 feat(database): 提供数据库创建与迁移支持脚本 - 新增数据库创建脚本,支持自动检测是否已存在 - 添加数据库表初始化脚本,支持创建和删除所有表 - 实现RBAC权限初始化,包含基础权限和角色创建 - 新增字段手动修复脚本,添加用户API Key和积分字段 - 强制迁移脚本支持清理连接和修复表结构,初始化默认数据及角色分配 feat(config): 新增系统配置参数 - 配置数据库、Redis、Session和MinIO相关参数 - 添加AI接口地址及试用Key配置 - 集成阿里云短信服务配置及开发模式相关参数 feat(extensions): 初始化数据库、Redis和MinIO客户端 - 创建全局SQLAlchemy数据库实例和Redis客户端 - 配置基于boto3的MinIO兼容S3客户端 chore(logs): 添加示例系统日志文件 - 记录用户请求、验证码发送成功与失败的日志信息
430 lines
13 KiB
Python
430 lines
13 KiB
Python
# connectors/asyncio.py
|
|
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
|
# <see AUTHORS file>
|
|
#
|
|
# This module is part of SQLAlchemy and is released under
|
|
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
|
|
|
"""generic asyncio-adapted versions of DBAPI connection and cursor"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import asyncio
|
|
import collections
|
|
import sys
|
|
from typing import Any
|
|
from typing import AsyncIterator
|
|
from typing import Deque
|
|
from typing import Iterator
|
|
from typing import NoReturn
|
|
from typing import Optional
|
|
from typing import Sequence
|
|
from typing import Tuple
|
|
from typing import Type
|
|
from typing import TYPE_CHECKING
|
|
|
|
from ..engine import AdaptedConnection
|
|
from ..util import EMPTY_DICT
|
|
from ..util.concurrency import await_fallback
|
|
from ..util.concurrency import await_only
|
|
from ..util.concurrency import in_greenlet
|
|
from ..util.typing import Protocol
|
|
|
|
if TYPE_CHECKING:
|
|
from ..engine.interfaces import _DBAPICursorDescription
|
|
from ..engine.interfaces import _DBAPIMultiExecuteParams
|
|
from ..engine.interfaces import _DBAPISingleExecuteParams
|
|
from ..engine.interfaces import DBAPIModule
|
|
from ..util.typing import Self
|
|
|
|
|
|
class AsyncIODBAPIConnection(Protocol):
    """protocol representing an async adapted version of a
    :pep:`249` database connection.

    Only the methods that the adaptation layer itself invokes are
    declared; everything else passes through the ``__getattr__`` hook.

    """

    # note that async DBAPIs dont agree if close() should be awaitable,
    # so it is omitted here and picked up by the __getattr__ hook below

    async def commit(self) -> None: ...

    # cursor() is declared non-async: the driver returns the cursor
    # object directly rather than awaiting it
    def cursor(self, *args: Any, **kwargs: Any) -> AsyncIODBAPICursor: ...

    async def rollback(self) -> None: ...

    # arbitrary driver-specific attributes are permitted on the
    # connection object
    def __getattr__(self, key: str) -> Any: ...

    def __setattr__(self, key: str, value: Any) -> None: ...
|
|
|
|
|
|
class AsyncIODBAPICursor(Protocol):
    """protocol representing an async adapted version
    of a :pep:`249` database cursor.

    Execution and fetch methods are coroutines; attribute-style members
    (``description``, ``rowcount``, ``arraysize``, ``lastrowid``) remain
    plain synchronous accessors, matching how asyncio drivers expose them.

    """

    # cursors are obtained via async context manager entry; see
    # AsyncAdapt_dbapi_cursor._aenter_cursor
    def __aenter__(self) -> Any: ...

    @property
    def description(
        self,
    ) -> _DBAPICursorDescription:
        """The description attribute of the Cursor."""
        ...

    @property
    def rowcount(self) -> int: ...

    arraysize: int

    lastrowid: int

    async def close(self) -> None: ...

    async def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any: ...

    async def executemany(
        self,
        operation: Any,
        parameters: _DBAPIMultiExecuteParams,
    ) -> Any: ...

    async def fetchone(self) -> Optional[Any]: ...

    async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]: ...

    async def fetchall(self) -> Sequence[Any]: ...

    async def setinputsizes(self, sizes: Sequence[Any]) -> None: ...

    # setoutputsize is synchronous in this protocol, unlike the other
    # pep-249 sizing/execution methods above
    def setoutputsize(self, size: Any, column: Any) -> None: ...

    async def callproc(
        self, procname: str, parameters: Sequence[Any] = ...
    ) -> Any: ...

    async def nextset(self) -> Optional[bool]: ...

    # async iteration over result rows
    def __aiter__(self) -> AsyncIterator[Any]: ...
|
|
|
|
|
|
class AsyncAdapt_dbapi_module:
    """Typing shim for an adapted DBAPI module.

    The pep-249 exception classes are declared only under TYPE_CHECKING
    so static checkers see them; at runtime every attribute access is
    resolved through ``__getattr__``.
    """

    if TYPE_CHECKING:
        Error = DBAPIModule.Error
        OperationalError = DBAPIModule.OperationalError
        InterfaceError = DBAPIModule.InterfaceError
        IntegrityError = DBAPIModule.IntegrityError

    def __getattr__(self, key: str) -> Any: ...
|
|
|
|
|
|
class AsyncAdapt_dbapi_cursor:
    """Adapts an asyncio driver cursor (:class:`AsyncIODBAPICursor`) to
    the synchronous :pep:`249` cursor interface.

    Rows are eagerly fetched into a local deque at execute time, so the
    synchronous ``fetchone()`` / ``fetchmany()`` / ``fetchall()`` never
    need to await the driver.
    """

    # False selects the buffered (pre-fetching) behavior; the
    # server-side subclass overrides this with True
    server_side = False

    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
        "_soft_closed_memoized",
    )

    # whether the driver cursor's close() is awaitable; drivers with a
    # plain-sync close() override this with False (see close() below)
    _awaitable_cursor_close: bool = True

    _cursor: AsyncIODBAPICursor
    _adapt_connection: AsyncAdapt_dbapi_connection
    _connection: AsyncIODBAPIConnection
    # rows buffered by _execute_async() / nextset() for sync fetching
    _rows: Deque[Any]

    def __init__(self, adapt_connection: AsyncAdapt_dbapi_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection

        # await_only or await_fallback, inherited from the owning
        # adapted connection
        self.await_ = adapt_connection.await_

        cursor = self._make_new_cursor(self._connection)
        self._cursor = self._aenter_cursor(cursor)
        # immutable empty placeholder; replaced with a populated
        # immutabledict by _async_soft_close()
        self._soft_closed_memoized = EMPTY_DICT
        if not self.server_side:
            self._rows = collections.deque()

    def _aenter_cursor(self, cursor: AsyncIODBAPICursor) -> AsyncIODBAPICursor:
        # the usable cursor is produced by async context manager entry
        return self.await_(cursor.__aenter__())  # type: ignore[no-any-return]

    def _make_new_cursor(
        self, connection: AsyncIODBAPIConnection
    ) -> AsyncIODBAPICursor:
        # hook point for subclasses that need driver-specific cursor
        # construction arguments
        return connection.cursor()

    @property
    def description(self) -> Optional[_DBAPICursorDescription]:
        # prefer the description memoized at soft-close time, as the
        # underlying cursor may already have been closed
        if "description" in self._soft_closed_memoized:
            return self._soft_closed_memoized["description"]  # type: ignore[no-any-return] # noqa: E501
        return self._cursor.description

    @property
    def rowcount(self) -> int:
        return self._cursor.rowcount

    @property
    def arraysize(self) -> int:
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value: int) -> None:
        self._cursor.arraysize = value

    @property
    def lastrowid(self) -> int:
        return self._cursor.lastrowid

    async def _async_soft_close(self) -> None:
        """close the cursor but keep the results pending, and memoize the
        description.

        Only applies to buffered cursors whose close() is awaitable;
        server-side cursors keep the driver cursor open for streaming.

        .. versionadded:: 2.0.44

        """

        if not self._awaitable_cursor_close or self.server_side:
            return

        # memoize description before closing so the property above keeps
        # working after the driver cursor is gone
        self._soft_closed_memoized = self._soft_closed_memoized.union(
            {
                "description": self._cursor.description,
            }
        )
        await self._cursor.close()

    def close(self) -> None:
        self._rows.clear()

        # updated as of 2.0.44
        # try to "close" the cursor based on what we know about the driver
        # and if we are able to. otherwise, hope that the asyncio
        # extension called _async_soft_close() if the cursor is going into
        # a sync context
        if self._cursor is None or bool(self._soft_closed_memoized):
            # already soft-closed (or torn down); nothing to do
            return

        if not self._awaitable_cursor_close:
            # driver close() is sync despite the protocol annotation
            self._cursor.close()  # type: ignore[unused-coroutine]
        elif in_greenlet():
            # we have an event loop available via the greenlet bridge
            self.await_(self._cursor.close())

    def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any:
        try:
            return self.await_(self._execute_async(operation, parameters))
        except Exception as error:
            # route through the connection so dialects can translate
            # driver exceptions; always raises
            self._adapt_connection._handle_exception(error)

    def executemany(
        self,
        operation: Any,
        seq_of_parameters: _DBAPIMultiExecuteParams,
    ) -> Any:
        try:
            return self.await_(
                self._executemany_async(operation, seq_of_parameters)
            )
        except Exception as error:
            self._adapt_connection._handle_exception(error)

    async def _execute_async(
        self, operation: Any, parameters: Optional[_DBAPISingleExecuteParams]
    ) -> Any:
        # serialize statement execution on this connection
        async with self._adapt_connection._execute_mutex:
            if parameters is None:
                result = await self._cursor.execute(operation)
            else:
                result = await self._cursor.execute(operation, parameters)

            # buffer the full result set now so sync fetches don't await
            if self._cursor.description and not self.server_side:
                self._rows = collections.deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(
        self,
        operation: Any,
        seq_of_parameters: _DBAPIMultiExecuteParams,
    ) -> Any:
        async with self._adapt_connection._execute_mutex:
            return await self._cursor.executemany(operation, seq_of_parameters)

    def nextset(self) -> None:
        self.await_(self._cursor.nextset())
        # re-buffer rows for the new result set, mirroring _execute_async
        if self._cursor.description and not self.server_side:
            self._rows = collections.deque(
                self.await_(self._cursor.fetchall())
            )

    def setinputsizes(self, *inputsizes: Any) -> None:
        # NOTE: this is overridden in aioodbc; see
        # https://github.com/aio-libs/aioodbc/issues/451
        # right now

        return self.await_(self._cursor.setinputsizes(*inputsizes))

    def __enter__(self) -> Self:
        return self

    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
        self.close()

    def __iter__(self) -> Iterator[Any]:
        # consumes the buffered rows as it iterates
        while self._rows:
            yield self._rows.popleft()

    def fetchone(self) -> Optional[Any]:
        if self._rows:
            return self._rows.popleft()
        else:
            return None

    def fetchmany(self, size: Optional[int] = None) -> Sequence[Any]:
        if size is None:
            size = self.arraysize
        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self) -> Sequence[Any]:
        retval = list(self._rows)
        self._rows.clear()
        return retval
|
|
|
|
|
|
class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor):
    """Server-side (streaming) variant of :class:`AsyncAdapt_dbapi_cursor`.

    Instead of buffering the full result set at execute time, every fetch
    call awaits the underlying driver cursor directly.
    """

    __slots__ = ()
    server_side = True

    def close(self) -> None:
        # idempotent: a second close() finds _cursor already cleared
        if self._cursor is None:
            return
        self.await_(self._cursor.close())
        self._cursor = None  # type: ignore

    def fetchone(self) -> Optional[Any]:
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size: Optional[int] = None) -> Any:
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self) -> Sequence[Any]:
        return self.await_(self._cursor.fetchall())

    def __iter__(self) -> Iterator[Any]:
        # drive the driver's async iterator one row at a time from the
        # sync side, stopping when the async side is exhausted
        async_iter = self._cursor.__aiter__()
        while True:
            try:
                row = self.await_(async_iter.__anext__())
            except StopAsyncIteration:
                return
            yield row
|
|
|
|
|
|
class AsyncAdapt_dbapi_connection(AdaptedConnection):
    """Adapts an asyncio driver connection to the synchronous :pep:`249`
    connection interface, awaiting driver coroutines via ``await_only``.
    """

    _cursor_cls = AsyncAdapt_dbapi_cursor
    _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor

    await_ = staticmethod(await_only)

    __slots__ = ("dbapi", "_execute_mutex")

    _connection: AsyncIODBAPIConnection

    def __init__(self, dbapi: Any, connection: AsyncIODBAPIConnection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes statement execution across this connection's cursors
        self._execute_mutex = asyncio.Lock()

    def cursor(self, server_side: bool = False) -> AsyncAdapt_dbapi_cursor:
        # choose streaming vs. buffered cursor implementation
        cursor_cls = self._ss_cursor_cls if server_side else self._cursor_cls
        return cursor_cls(self)

    def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any:
        """lots of DBAPIs seem to provide this, so include it"""
        new_cursor = self.cursor()
        new_cursor.execute(operation, parameters)
        return new_cursor

    def _handle_exception(self, error: Exception) -> NoReturn:
        # re-raise with the traceback currently being handled; dialect
        # subclasses override this to translate driver exceptions
        raise error.with_traceback(sys.exc_info()[2])

    def rollback(self) -> None:
        try:
            self.await_(self._connection.rollback())
        except Exception as err:
            self._handle_exception(err)

    def commit(self) -> None:
        try:
            self.await_(self._connection.commit())
        except Exception as err:
            self._handle_exception(err)

    def close(self) -> None:
        self.await_(self._connection.close())
|
|
|
|
|
|
class AsyncAdaptFallback_dbapi_connection(AsyncAdapt_dbapi_connection):
    """Variant of :class:`AsyncAdapt_dbapi_connection` that uses
    ``await_fallback`` instead of ``await_only`` to run driver
    coroutines.
    """

    __slots__ = ()

    await_ = staticmethod(await_fallback)
|
|
|
|
|
|
class AsyncAdapt_terminate:
    """Mixin for a AsyncAdapt_dbapi_connection to add terminate support."""

    __slots__ = ()

    def terminate(self) -> None:
        if not in_greenlet():
            # not in a greenlet; this is the gc cleanup case — drop the
            # connection without awaiting anything
            self._terminate_force_close()
            return

        # in a greenlet; this is the connection-invalidated case.
        try:
            # try to gracefully close, shielded from cancellation;
            # see #10717
            self.await_(asyncio.shield(self._terminate_graceful_close()))  # type: ignore[attr-defined] # noqa: E501
        except self._terminate_handled_exceptions() as exc:
            # an already-disconnected connection (e.g. a stale pooled
            # connection being recycled) may fail to close; in that case
            # terminate without any further waiting.  see issue #8419
            self._terminate_force_close()
            if isinstance(exc, asyncio.CancelledError):
                # propagate cancellation once cleanup is done
                raise

    def _terminate_handled_exceptions(self) -> Tuple[Type[BaseException], ...]:
        """Exception types terminate() recovers from when the graceful
        close attempt fails.
        """
        return (asyncio.TimeoutError, asyncio.CancelledError, OSError)

    async def _terminate_graceful_close(self) -> None:
        """Try to close connection gracefully; subclasses implement."""
        raise NotImplementedError

    def _terminate_force_close(self) -> None:
        """Terminate the connection; subclasses implement."""
        raise NotImplementedError
|