- 新增图像生成接口,支持试用、积分和自定义API Key模式 - 实现生成图片结果异步上传至MinIO存储,带重试机制 - 优化积分预扣除和异常退还逻辑,保障用户积分准确 - 添加获取生成历史记录接口,支持时间范围和分页 - 提供本地字典配置接口,支持模型、比例、提示模板和尺寸 - 实现图片批量上传接口,支持S3兼容对象存储 feat(admin): 增加管理员角色管理与权限分配接口 - 实现角色列表查询、角色创建、更新及删除功能 - 增加权限列表查询接口 - 实现用户角色分配接口,便于统一管理用户权限 - 增加系统字典增删查改接口,支持分类过滤和排序 - 权限控制全面覆盖管理接口,保证安全访问 feat(auth): 完善用户登录注册及权限相关接口与页面 - 实现手机号验证码发送及校验功能,保障注册安全 - 支持手机号注册、登录及退出接口,集成日志记录 - 增加修改密码功能,验证原密码后更新 - 提供动态导航菜单接口,基于权限展示不同菜单 - 实现管理界面路由及日志、角色、字典管理页面访问权限控制 - 添加系统日志查询接口,支持关键词和等级筛选 feat(app): 初始化Flask应用并配置蓝图与数据库 - 创建应用程序工厂,加载配置,初始化数据库和Redis客户端 - 注册认证、API及管理员蓝图,整合路由 - 根路由渲染主页模板 - 应用上下文中自动创建数据库表,保证运行环境准备完毕 feat(database): 提供数据库创建与迁移支持脚本 - 新增数据库创建脚本,支持自动检测是否已存在 - 添加数据库表初始化脚本,支持创建和删除所有表 - 实现RBAC权限初始化,包含基础权限和角色创建 - 新增字段手动修复脚本,添加用户API Key和积分字段 - 强制迁移脚本支持清理连接和修复表结构,初始化默认数据及角色分配 feat(config): 新增系统配置参数 - 配置数据库、Redis、Session和MinIO相关参数 - 添加AI接口地址及试用Key配置 - 集成阿里云短信服务配置及开发模式相关参数 feat(extensions): 初始化数据库、Redis和MinIO客户端 - 创建全局SQLAlchemy数据库实例和Redis客户端 - 配置基于boto3的MinIO兼容S3客户端 chore(logs): 添加示例系统日志文件 - 记录用户请求、验证码发送成功与失败的日志信息
186 lines
6.0 KiB
Python
186 lines
6.0 KiB
Python
# ext/serializer.py
|
|
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
|
|
# <see AUTHORS file>
|
|
#
|
|
# This module is part of SQLAlchemy and is released under
|
|
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
|
# mypy: ignore-errors
|
|
|
|
"""Serializer/Deserializer objects for usage with SQLAlchemy query structures,
|
|
allowing "contextual" deserialization.
|
|
|
|
.. legacy::
|
|
|
|
The serializer extension is **legacy** and should not be used for
|
|
new development.
|
|
|
|
Any SQLAlchemy query structure, either based on sqlalchemy.sql.*
|
|
or sqlalchemy.orm.* can be used. The mappers, Tables, Columns, Session
|
|
etc. which are referenced by the structure are not persisted in serialized
|
|
form, but are instead re-associated with the query structure
|
|
when it is deserialized.
|
|
|
|
.. warning:: The serializer extension uses pickle to serialize and
   deserialize objects, so the same security considerations mentioned in
   the `python documentation
   <https://docs.python.org/3/library/pickle.html>`_ apply.
|
|
|
|
Usage is nearly the same as that of the standard Python pickle module::
|
|
|
|
from sqlalchemy.ext.serializer import loads, dumps
|
|
|
|
metadata = MetaData(bind=some_engine)
|
|
Session = scoped_session(sessionmaker())
|
|
|
|
# ... define mappers
|
|
|
|
query = (
|
|
Session.query(MyClass)
|
|
.filter(MyClass.somedata == "foo")
|
|
.order_by(MyClass.sortkey)
|
|
)
|
|
|
|
# pickle the query
|
|
serialized = dumps(query)
|
|
|
|
# unpickle. Pass in metadata + scoped_session
|
|
query2 = loads(serialized, metadata, Session)
|
|
|
|
print(query2.all())
|
|
|
|
Similar restrictions as when using raw pickle apply; mapped classes must be
|
|
themselves be pickleable, meaning they are importable from a module-level
|
|
namespace.
|
|
|
|
The serializer module is only appropriate for query structures. It is not
|
|
needed for:
|
|
|
|
* instances of user-defined classes. These contain no references to engines,
|
|
sessions or expression constructs in the typical case and can be serialized
|
|
directly.
|
|
|
|
* Table metadata that is to be loaded entirely from the serialized structure
|
|
(i.e. is not already declared in the application). Regular
|
|
pickle.loads()/dumps() can be used to fully dump any ``MetaData`` object,
|
|
typically one which was reflected from an existing database at some previous
|
|
point in time. The serializer module is specifically for the opposite case,
|
|
where the Table metadata is already present in memory.
|
|
|
|
"""
|
|
|
|
from io import BytesIO
|
|
import pickle
|
|
import re
|
|
|
|
from .. import Column
|
|
from .. import Table
|
|
from ..engine import Engine
|
|
from ..orm import class_mapper
|
|
from ..orm.interfaces import MapperProperty
|
|
from ..orm.mapper import Mapper
|
|
from ..orm.session import Session
|
|
from ..util import b64decode
|
|
from ..util import b64encode
|
|
|
|
|
|
__all__ = ["Serializer", "Deserializer", "dumps", "loads"]
|
|
|
|
|
|
class Serializer(pickle.Pickler):
    """Pickler subclass that substitutes symbolic string tokens for
    mapper, table, column, session and engine references.

    Recognized objects are reduced to a ``"<kind>:<payload>"`` persistent
    id; everything else returns ``None`` and is pickled normally.  The
    tokens are resolved back to live objects by :class:`Deserializer`.
    """

    def persistent_id(self, obj):
        """Return a string token for *obj*, or None to pickle it as-is."""
        if isinstance(obj, Mapper) and not obj.non_primary:
            return "mapper:" + b64encode(pickle.dumps(obj.class_))
        if isinstance(obj, MapperProperty) and not obj.parent.non_primary:
            owner = b64encode(pickle.dumps(obj.parent.class_))
            return "mapperprop:" + owner + ":" + obj.key
        if isinstance(obj, Table):
            # a Table carrying a "parententity" annotation stands in for a
            # mapped class' selectable, so encode the class, not the name
            if "parententity" in obj._annotations:
                blob = pickle.dumps(obj._annotations["parententity"].class_)
                return "mapper_selectable:" + b64encode(blob)
            return f"table:{obj.key}"
        if isinstance(obj, Column) and isinstance(obj.table, Table):
            return f"column:{obj.table.key}:{obj.key}"
        if isinstance(obj, Session):
            return "session:"
        if isinstance(obj, Engine):
            return "engine:"
        return None
# Recognizer for the persistent-id tokens written by Serializer.persistent_id:
# group 1 is the token kind, group 2 the payload after the first colon.
# (The "attribute" kind is accepted on load for compatibility even though
# Serializer in this module does not currently emit it.)
our_ids = re.compile(
    r"(mapperprop|mapper|mapper_selectable|table|column|session|attribute|engine)"
    r":(.*)"
)
class Deserializer(pickle.Unpickler):
    """Unpickler that resolves the symbolic tokens produced by
    :class:`Serializer` back into live mappers, tables, columns, sessions
    and engines, using the metadata / scoped session / engine given here.
    """

    def __init__(self, file, metadata=None, scoped_session=None, engine=None):
        super().__init__(file)
        self.metadata = metadata
        self.scoped_session = scoped_session
        self.engine = engine

    def get_engine(self):
        """Return the engine used to resolve "engine:" tokens, or None.

        An explicitly supplied engine wins; otherwise fall back to the
        bind of the scoped session, if one is available.
        """
        if self.engine:
            return self.engine
        if self.scoped_session and self.scoped_session().bind:
            return self.scoped_session().bind
        return None

    def persistent_load(self, id_):
        """Resolve a persistent id written by Serializer.persistent_id."""
        matched = our_ids.match(str(id_))
        if matched is None:
            # not one of our tokens; let pickle handle it
            return None
        kind, payload = matched.group(1, 2)
        if kind == "attribute":
            attr_name, encoded_cls = payload.split(":")
            return getattr(pickle.loads(b64decode(encoded_cls)), attr_name)
        if kind == "mapper":
            return class_mapper(pickle.loads(b64decode(payload)))
        if kind == "mapper_selectable":
            cls = pickle.loads(b64decode(payload))
            return class_mapper(cls).__clause_element__()
        if kind == "mapperprop":
            encoded_cls, prop_name = payload.split(":")
            cls = pickle.loads(b64decode(encoded_cls))
            return class_mapper(cls).attrs[prop_name]
        if kind == "table":
            return self.metadata.tables[payload]
        if kind == "column":
            table_key, column_name = payload.split(":")
            return self.metadata.tables[table_key].c[column_name]
        if kind == "session":
            return self.scoped_session()
        if kind == "engine":
            return self.get_engine()
        raise Exception("Unknown token: %s" % kind)
def dumps(obj, protocol=pickle.HIGHEST_PROTOCOL):
    """Serialize *obj* with :class:`Serializer` and return the bytes.

    :param obj: any SQLAlchemy query structure to serialize.
    :param protocol: pickle protocol number, highest available by default.
    """
    stream = BytesIO()
    Serializer(stream, protocol).dump(obj)
    return stream.getvalue()
def loads(data, metadata=None, scoped_session=None, engine=None):
    """Reconstruct a structure serialized by :func:`dumps`.

    :param data: bytes previously produced by :func:`dumps`.
    :param metadata: MetaData whose tables/columns resolve "table:"/"column:"
        tokens.
    :param scoped_session: scoped session resolving "session:" tokens.
    :param engine: engine resolving "engine:" tokens.
    """
    return Deserializer(BytesIO(data), metadata, scoped_session, engine).load()