ai_v/venv/Lib/site-packages/sqlalchemy/testing/provision.py
24024 af7c11d7f9 feat(api): 实现图像生成及后台同步功能
- 新增图像生成接口,支持试用、积分和自定义API Key模式
- 实现生成图片结果异步上传至MinIO存储,带重试机制
- 优化积分预扣除和异常退还逻辑,保障用户积分准确
- 添加获取生成历史记录接口,支持时间范围和分页
- 提供本地字典配置接口,支持模型、比例、提示模板和尺寸
- 实现图片批量上传接口,支持S3兼容对象存储

feat(admin): 增加管理员角色管理与权限分配接口

- 实现角色列表查询、角色创建、更新及删除功能
- 增加权限列表查询接口
- 实现用户角色分配接口,便于统一管理用户权限
- 增加系统字典增删查改接口,支持分类过滤和排序
- 权限控制全面覆盖管理接口,保证安全访问

feat(auth): 完善用户登录注册及权限相关接口与页面

- 实现手机号验证码发送及校验功能,保障注册安全
- 支持手机号注册、登录及退出接口,集成日志记录
- 增加修改密码功能,验证原密码后更新
- 提供动态导航菜单接口,基于权限展示不同菜单
- 实现管理界面路由及日志、角色、字典管理页面访问权限控制
- 添加系统日志查询接口,支持关键词和等级筛选

feat(app): 初始化Flask应用并配置蓝图与数据库

- 创建应用程序工厂,加载配置,初始化数据库和Redis客户端
- 注册认证、API及管理员蓝图,整合路由
- 根路由渲染主页模板
- 应用上下文中自动创建数据库表,保证运行环境准备完毕

feat(database): 提供数据库创建与迁移支持脚本

- 新增数据库创建脚本,支持自动检测是否已存在
- 添加数据库表初始化脚本,支持创建和删除所有表
- 实现RBAC权限初始化,包含基础权限和角色创建
- 新增字段手动修复脚本,添加用户API Key和积分字段
- 强制迁移脚本支持清理连接和修复表结构,初始化默认数据及角色分配

feat(config): 新增系统配置参数

- 配置数据库、Redis、Session和MinIO相关参数
- 添加AI接口地址及试用Key配置
- 集成阿里云短信服务配置及开发模式相关参数

feat(extensions): 初始化数据库、Redis和MinIO客户端

- 创建全局SQLAlchemy数据库实例和Redis客户端
- 配置基于boto3的MinIO兼容S3客户端

chore(logs): 添加示例系统日志文件

- 记录用户请求、验证码发送成功与失败的日志信息
2026-01-12 00:53:31 +08:00

598 lines
17 KiB
Python

# testing/provision.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from __future__ import annotations
import collections
import contextlib
import logging
from . import config
from . import engines
from . import util
from .. import exc
from .. import inspect
from ..engine import Connection
from ..engine import Engine
from ..engine import url as sa_url
from ..schema import sort_tables_and_constraints
from ..sql import ddl
from ..sql import schema
from ..util import decorator
log = logging.getLogger(__name__)
FOLLOWER_IDENT = None
class register:
    """Per-backend dispatch registry for provisioning hooks.

    Implementations are stored keyed by backend name; the ``"*"`` key is
    the default implementation used when no backend-specific hook has
    been registered.
    """

    def __init__(self, decorator=None):
        # backend name -> hook implementation
        self.fns = {}
        # optional adapter applied to every function as it is registered
        self.decorator = decorator

    @classmethod
    def init(cls, fn):
        """Register *fn* as the default ("*") implementation of a hook."""
        return register().for_db("*")(fn)

    @classmethod
    def init_decorator(cls, decorator):
        """Like :meth:`init`, but wrap each registered fn with *decorator*."""
        return register(decorator).for_db("*")

    def for_db(self, *dbnames):
        """Return a decorator registering a fn for each named backend."""

        def decorate(fn):
            if self.decorator:
                fn = self.decorator(fn)
            for name in dbnames:
                self.fns[name] = fn
            return self

        return decorate

    def call_original(self, cfg, *arg, **kw):
        """Invoke the default ("*") implementation directly."""
        return self.fns["*"](cfg, *arg, **kw)

    def __call__(self, cfg, *arg, **kw):
        # normalize whatever configuration object was handed in down to
        # a URL, then dispatch on its backend name
        if isinstance(cfg, str):
            url = sa_url.make_url(cfg)
        elif isinstance(cfg, sa_url.URL):
            url = cfg
        elif isinstance(cfg, (Engine, Connection)):
            url = cfg.engine.url
        else:
            url = cfg.db.url

        backend = url.get_backend_name()
        if backend in self.fns:
            return self.fns[backend](cfg, *arg, **kw)
        return self.fns["*"](cfg, *arg, **kw)
def create_follower_db(follower_ident):
    """Create the follower database on every distinct configured host."""
    for config_ in _configs_for_db_operation():
        log.info("CREATE database %s, URI %r", follower_ident, config_.db.url)
        create_db(config_, config_.db, follower_ident)
def setup_config(db_url, options, file_config, follower_ident):
    """Build and register a testing configuration for *db_url*.

    Loads the dialect so its provisioning hooks are installed, rewrites
    the URL for a follower database when *follower_ident* is given,
    creates the testing engine, verifies connectivity, and registers the
    resulting :class:`.Config`.
    """
    # load the dialect, which should also have it set up its provision
    # hooks
    dialect = sa_url.make_url(db_url).get_dialect()
    dialect.load_provisioning()

    if follower_ident:
        db_url = follower_url_from_main(db_url, follower_ident)

    db_opts = {}
    update_db_opts(db_url, db_opts, options)
    db_opts["scope"] = "global"

    eng = engines.testing_engine(db_url, db_opts)
    post_configure_engine(db_url, eng, follower_ident)
    # verify connectivity up front
    eng.connect().close()

    cfg = config.Config.register(eng, db_opts, options, file_config)

    if follower_ident:
        # a symbolic name that tests can use if they need to disambiguate
        # names across databases
        config.ident = follower_ident
        configure_follower(cfg, follower_ident)
    return cfg
def drop_follower_db(follower_ident):
    """Drop the follower database on every distinct configured host."""
    for config_ in _configs_for_db_operation():
        log.info("DROP database %s, URI %r", follower_ident, config_.db.url)
        drop_db(config_, config_.db, follower_ident)
def generate_db_urls(db_urls, extra_drivers):
    """Generate a set of URLs to test given configured URLs plus additional
    driver names.

    Given:

    .. sourcecode:: text

        --dburi postgresql://db1 \\
        --dburi postgresql://db2 \\
        --dburi postgresql://db3 \\
        --dbdriver=psycopg2 --dbdriver=asyncpg?async_fallback=true

    Noting that the default postgresql driver is psycopg2, the output
    would be:

    .. sourcecode:: text

        postgresql+psycopg2://db1
        postgresql+asyncpg://db1
        postgresql+psycopg2://db2
        postgresql+psycopg2://db3

    That is, for the driver in a --dburi, we want to keep that and use that
    driver for each URL it's part of . For a driver that is only
    in --dbdrivers, we want to use it just once for one of the URLs.
    for a driver that is both coming from --dburi as well as --dbdrivers,
    we want to keep it in that dburi.

    Driver specific query options can be specified by added them to the
    driver name. For example, to enable the async fallback option for
    asyncpg::

    .. sourcecode:: text

        --dburi postgresql://db1 \\
        --dbdriver=asyncpg?async_fallback=true

    """
    urls = set()

    # backend name -> set of driver names already covered by a --dburi
    backend_to_driver_we_already_have = collections.defaultdict(set)

    urls_plus_dialects = [
        (url_obj, url_obj.get_dialect())
        for url_obj in [sa_url.make_url(db_url) for db_url in db_urls]
    ]

    for url_obj, dialect in urls_plus_dialects:
        # use get_driver_name instead of dialect.driver to account for
        # "_async" virtual drivers like oracledb and psycopg
        driver_name = url_obj.get_driver_name()
        backend_to_driver_we_already_have[dialect.name].add(driver_name)

    # backend name -> mutable set of extra drivers not yet assigned to a URL
    backend_to_driver_we_need = {}

    for url_obj, dialect in urls_plus_dialects:
        backend = dialect.name
        dialect.load_provisioning()

        if backend not in backend_to_driver_we_need:
            backend_to_driver_we_need[backend] = extra_per_backend = set(
                extra_drivers
            ).difference(backend_to_driver_we_already_have[backend])
        else:
            extra_per_backend = backend_to_driver_we_need[backend]

        for driver_url in _generate_driver_urls(url_obj, extra_per_backend):
            if driver_url in urls:
                continue
            urls.add(driver_url)
            yield driver_url
def _generate_driver_urls(url, extra_drivers):
    """Yield *url* on its main driver, then variants for each extra driver.

    Extra drivers that produce a usable URL are consumed from
    *extra_drivers* so each is used only once across all URLs.
    """
    main_driver = url.get_driver_name()
    extra_drivers.discard(main_driver)

    # the URL on its configured (main) driver always comes first
    url = generate_driver_url(url, main_driver, "")
    yield url

    for drv in list(extra_drivers):
        # driver names may carry query options,
        # e.g. "asyncpg?async_fallback=true"
        driver_only, _, query_str = drv.partition("?")
        if not query_str:
            query_str = None

        new_url = generate_driver_url(url, driver_only, query_str)
        if new_url:
            # this driver is available; consume it
            extra_drivers.remove(drv)
            yield new_url
@register.init
def is_preferred_driver(cfg, engine):
    """Return True if the engine's URL is on the "default" driver, or
    more generally the "preferred" driver to use for tests.

    Backends can override this to make a different driver the "preferred"
    driver that's not the default.

    """
    # compare dialect entrypoints: the URL as given vs. the URL with the
    # driver portion stripped (i.e. the backend's default driver)
    return (
        engine.url._get_entrypoint()
        is engine.url.set(
            drivername=engine.url.get_backend_name()
        )._get_entrypoint()
    )
@register.init
def generate_driver_url(url, driver, query_str):
    """Return *url* rewritten to use *driver*, or None if unavailable.

    *query_str*, when non-empty, is merged into the URL's query string.
    Returns None when the resulting dialect cannot be imported.
    """
    backend = url.get_backend_name()
    new_url = url.set(drivername=f"{backend}+{driver}")
    if query_str:
        new_url = new_url.update_query_string(query_str)

    try:
        new_url.get_dialect()
    except exc.NoSuchModuleError:
        # driver is not installed for this backend
        return None
    return new_url
def _configs_for_db_operation():
    """Yield one config per distinct (backend, user, host, database).

    All engines are disposed before and after iteration so no pooled
    connections are held while databases are created or dropped.
    """
    seen = set()

    for cfg in config.Config.all_configs():
        cfg.db.dispose()

    for cfg in config.Config.all_configs():
        url = cfg.db.url
        key = (url.get_backend_name(), url.username, url.host, url.database)
        if key not in seen:
            yield cfg
            seen.add(key)

    for cfg in config.Config.all_configs():
        cfg.db.dispose()
@register.init
def drop_all_schema_objects_pre_tables(cfg, eng):
    """Hook run before tables are dropped; no-op by default."""
@register.init
def drop_all_schema_objects_post_tables(cfg, eng):
    """Hook run after tables are dropped; no-op by default."""
def drop_all_schema_objects(cfg, eng):
    """Drop views, tables and sequences from the test database.

    Runs the pre/post hooks around the table drop, and covers the extra
    test schemas when the backend supports schemas.
    """
    drop_all_schema_objects_pre_tables(cfg, eng)

    drop_views(cfg, eng)
    if config.requirements.materialized_views.enabled:
        drop_materialized_views(cfg, eng)

    inspector = inspect(eng)

    schemas_to_drop = (None,)
    if config.requirements.schemas.enabled_for_config(cfg):
        schemas_to_drop += (cfg.test_schema, cfg.test_schema_2)
    util.drop_all_tables(eng, inspector, consider_schemas=schemas_to_drop)

    drop_all_schema_objects_post_tables(cfg, eng)

    if config.requirements.sequences.enabled_for_config(cfg):
        with eng.begin() as conn:
            for seq_name in inspector.get_sequence_names():
                conn.execute(ddl.DropSequence(schema.Sequence(seq_name)))
            if config.requirements.schemas.enabled_for_config(cfg):
                for schema_name in [cfg.test_schema, cfg.test_schema_2]:
                    for seq_name in inspector.get_sequence_names(
                        schema=schema_name
                    ):
                        conn.execute(
                            ddl.DropSequence(
                                schema.Sequence(seq_name, schema=schema_name)
                            )
                        )
def drop_views(cfg, eng):
    """Drop all plain views in the default schema and the test schema.

    Backends without view reflection (NotImplementedError) are skipped.
    """
    inspector = inspect(eng)

    schemas_to_check = [None]
    if config.requirements.schemas.enabled_for_config(cfg):
        schemas_to_check.append(cfg.test_schema)

    for schema_name in schemas_to_check:
        try:
            view_names = inspector.get_view_names(schema=schema_name)
        except NotImplementedError:
            # backend can't reflect views; nothing to drop
            continue
        with eng.begin() as conn:
            for view_name in view_names:
                conn.execute(
                    ddl._DropView(
                        schema.Table(
                            view_name,
                            schema.MetaData(),
                            schema=schema_name,
                        )
                    )
                )
def drop_materialized_views(cfg, eng):
    """Drop all materialized views, using driver-level SQL.

    Covers the default schema, plus the test schema when the backend
    supports schemas.
    """
    inspector = inspect(eng)

    mview_names = inspector.get_materialized_view_names()
    with eng.begin() as conn:
        for view_name in mview_names:
            conn.exec_driver_sql(f"DROP MATERIALIZED VIEW {view_name}")

    if config.requirements.schemas.enabled_for_config(cfg):
        mview_names = inspector.get_materialized_view_names(
            schema=cfg.test_schema
        )
        with eng.begin() as conn:
            for view_name in mview_names:
                conn.exec_driver_sql(
                    f"DROP MATERIALIZED VIEW {cfg.test_schema}.{view_name}"
                )
@register.init
def create_db(cfg, eng, ident):
    """Dynamically create a database for testing.

    Used when a test run will employ multiple processes, e.g., when run
    via `tox` or `pytest -n4`.  Backends must supply an implementation;
    the default raises NotImplementedError.
    """
    raise NotImplementedError(
        "no DB creation routine for cfg: %s" % (eng.url,)
    )
@register.init
def drop_db(cfg, eng, ident):
    """Drop a database that we dynamically created for testing.

    Backends must supply an implementation; the default raises
    NotImplementedError.
    """
    raise NotImplementedError("no DB drop routine for cfg: %s" % (eng.url,))
def _adapt_update_db_opts(fn):
    """Adapt a legacy two-argument ``update_db_opts`` hook to three args.

    Newer hooks accept ``(db_url, db_opts, options)``; older dialect
    implementations accept only ``(db_url, db_opts)`` and are wrapped so
    both forms can be invoked uniformly.
    """
    spec = util.inspect_getfullargspec(fn)
    if len(spec.args) == 3:
        # already uses the modern three-argument signature
        return fn
    # legacy hook: discard the options argument
    return lambda db_url, db_opts, _options: fn(db_url, db_opts)
@register.init_decorator(_adapt_update_db_opts)
def update_db_opts(db_url, db_opts, options):
    """Populate *db_opts* with engine options for a created test database."""
@register.init
def post_configure_engine(url, engine, follower_ident):
    """Perform extra steps after configuring the main engine for testing.

    (For the internal dialects, currently only used by sqlite, oracle,
    mssql.)  No-op by default.
    """
@register.init
def post_configure_testing_engine(url, engine, options, scope):
    """Perform extra steps after configuring any engine created within
    the testing_engine() function.

    This includes the main engine as well as most ad-hoc testing engines.
    Steps here should not get in the way of test cases that are looking
    for events, etc.  No-op by default.
    """
@register.init
def follower_url_from_main(url, ident):
    """Create a connection URL for a dynamically-created test database.

    :param url: the connection URL specified when the test run was invoked
    :param ident: the pytest-xdist "worker identifier" to be used as the
     database name
    """
    return sa_url.make_url(url).set(database=ident)
@register.init
def configure_follower(cfg, ident):
    """Create dialect-specific config settings for a follower database.

    No-op by default.
    """
@register.init
def run_reap_dbs(url, ident):
    """Remove databases that were created during the test process, after the
    process has ended.

    This is an optional step that is invoked for certain backends that do
    not reliably release locks on the database as long as a process is
    still in use.  For the internal dialects, this is currently only
    necessary for mssql and oracle.  No-op by default.
    """
def reap_dbs(idents_file):
    """Drop leftover test databases recorded in *idents_file*.

    Each line of the file holds ``<db_name> <db_url>``; entries are
    grouped by (backend, host) and handed to the backend's
    ``run_reap_dbs`` hook.
    """
    log.info("Reaping databases...")

    urls = collections.defaultdict(set)
    idents = collections.defaultdict(set)
    dialects = {}

    with open(idents_file) as file_:
        for raw_line in file_:
            db_name, db_url = raw_line.strip().split(" ")
            url_obj = sa_url.make_url(db_url)

            # load provisioning hooks once per database name
            if db_name not in dialects:
                dialects[db_name] = url_obj.get_dialect()
                dialects[db_name].load_provisioning()

            url_key = (url_obj.get_backend_name(), url_obj.host)
            urls[url_key].add(db_url)
            idents[url_key].add(db_name)

    for url_key in urls:
        # any one URL for this host serves as the connection point
        url = list(urls[url_key])[0]
        run_reap_dbs(url, idents[url_key])
@register.init
def temp_table_keyword_args(cfg, eng):
    """Specify keyword arguments for creating a temporary Table.

    Dialect-specific implementations of this method will return the
    kwargs that are passed to the Table method when creating a temporary
    table for testing, e.g., in the define_temp_tables method of the
    ComponentReflectionTest class in suite/test_reflection.py.  Backends
    must supply an implementation; the default raises NotImplementedError.
    """
    raise NotImplementedError(
        "no temp table keyword args routine for cfg: %s" % (eng.url,)
    )
@register.init
def prepare_for_drop_tables(config, connection):
    """Hook run before tables are dropped during teardown; no-op by default."""
@register.init
def stop_test_class_outside_fixtures(config, db, testcls):
    """Hook run when a test class finishes, outside fixtures; no-op by default."""
@register.init
def get_temp_table_name(cfg, eng, base_name):
    """Specify table name for creating a temporary Table.

    Dialect-specific implementations of this method will return the
    name to use when creating a temporary table for testing,
    e.g., in the define_temp_tables method of the
    ComponentReflectionTest class in suite/test_reflection.py.

    Default to just the base name since that's what most dialects will
    use. The mssql dialect's implementation will need a "#" prepended.
    """
    return base_name
@register.init
def set_default_schema_on_connection(cfg, dbapi_connection, schema_name):
    """Set the default schema on a raw DBAPI connection.

    Backends that support it must supply an implementation; the default
    raises NotImplementedError.
    """
    raise NotImplementedError(
        "backend does not implement a schema name set function: %s"
        % (cfg.db.url,)
    )
@register.init
def upsert(
    cfg, table, returning, *, set_lambda=None, sort_by_parameter_order=False
):
    """Return the backend's insert..on conflict / on dupe etc. construct.

    While we should add a backend-neutral upsert construct as well, such
    as insert().upsert(), it's important that we continue to test the
    backend-specific insert() constructs since if we do implement
    insert().upsert(), that would be using a different codepath for the
    things we need to test like insertmanyvalues, etc.  Backends must
    supply an implementation; the default raises NotImplementedError.
    """
    raise NotImplementedError(
        f"backend does not include an upsert implementation: {cfg.db.url}"
    )
@register.init
def normalize_sequence(cfg, sequence):
    """Normalize sequence parameters for dialects that don't start with 1
    by default.

    The default implementation returns the sequence unchanged.
    """
    return sequence
@register.init
def allow_stale_update_impl(cfg):
    """Return a context manager governing stale-row UPDATEs.

    The default is a no-op context; backends that raise on stale updates
    override this to disable that behavior for the duration.
    """
    return contextlib.nullcontext()
@decorator
def allow_stale_updates(fn, *arg, **kw):
    """Decorator around a test function that indicates the test will
    be UPDATING rows that have been read and are now stale.

    This normally doesn't require intervention except for mariadb 12
    which now raises its own error for that, and we want to turn off
    that setting just within the scope of the test that needs it
    to be turned off (i.e. ORM stale version tests).
    """
    with allow_stale_update_impl(config._current):
        return fn(*arg, **kw)
@register.init
def delete_from_all_tables(connection, cfg, metadata):
    """An absolutely foolproof delete-from-all-tables routine.

    Dialects should override this to add special instructions like
    disabling constraints, etc.
    """
    savepoints = getattr(cfg.requirements, "savepoints", False)
    if savepoints:
        savepoints = savepoints.enabled

    inspector = inspect(connection)

    # keep only tables that actually exist in the database, in dependency
    # order; remember that inspector.get_table_names() is cached, so this
    # emits SQL once per unique schema name
    existing_tables = [
        table
        for (table, fks) in sort_tables_and_constraints(
            metadata.tables.values()
        )
        if table is not None
        and table.name in inspector.get_table_names(schema=table.schema)
    ]

    # delete children before parents
    for table in reversed(existing_tables):
        if savepoints:
            with connection.begin_nested():
                connection.execute(table.delete())
        else:
            connection.execute(table.delete())