- 新增图像生成接口,支持试用、积分和自定义API Key模式 - 实现生成图片结果异步上传至MinIO存储,带重试机制 - 优化积分预扣除和异常退还逻辑,保障用户积分准确 - 添加获取生成历史记录接口,支持时间范围和分页 - 提供本地字典配置接口,支持模型、比例、提示模板和尺寸 - 实现图片批量上传接口,支持S3兼容对象存储 feat(admin): 增加管理员角色管理与权限分配接口 - 实现角色列表查询、角色创建、更新及删除功能 - 增加权限列表查询接口 - 实现用户角色分配接口,便于统一管理用户权限 - 增加系统字典增删查改接口,支持分类过滤和排序 - 权限控制全面覆盖管理接口,保证安全访问 feat(auth): 完善用户登录注册及权限相关接口与页面 - 实现手机号验证码发送及校验功能,保障注册安全 - 支持手机号注册、登录及退出接口,集成日志记录 - 增加修改密码功能,验证原密码后更新 - 提供动态导航菜单接口,基于权限展示不同菜单 - 实现管理界面路由及日志、角色、字典管理页面访问权限控制 - 添加系统日志查询接口,支持关键词和等级筛选 feat(app): 初始化Flask应用并配置蓝图与数据库 - 创建应用程序工厂,加载配置,初始化数据库和Redis客户端 - 注册认证、API及管理员蓝图,整合路由 - 根路由渲染主页模板 - 应用上下文中自动创建数据库表,保证运行环境准备完毕 feat(database): 提供数据库创建与迁移支持脚本 - 新增数据库创建脚本,支持自动检测是否已存在 - 添加数据库表初始化脚本,支持创建和删除所有表 - 实现RBAC权限初始化,包含基础权限和角色创建 - 新增字段手动修复脚本,添加用户API Key和积分字段 - 强制迁移脚本支持清理连接和修复表结构,初始化默认数据及角色分配 feat(config): 新增系统配置参数 - 配置数据库、Redis、Session和MinIO相关参数 - 添加AI接口地址及试用Key配置 - 集成阿里云短信服务配置及开发模式相关参数 feat(extensions): 初始化数据库、Redis和MinIO客户端 - 创建全局SQLAlchemy数据库实例和Redis客户端 - 配置基于boto3的MinIO兼容S3客户端 chore(logs): 添加示例系统日志文件 - 记录用户请求、验证码发送成功与失败的日志信息
129 lines
4.4 KiB
Python
129 lines
4.4 KiB
Python
# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
#
|
|
# Licensed under the Apache License, Version 2.0 (the "License"). You
|
|
# may not use this file except in compliance with the License. A copy of
|
|
# the License is located at
|
|
#
|
|
# http://aws.amazon.com/apache2.0/
|
|
#
|
|
# or in the "license" file accompanying this file. This file is
|
|
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
|
|
# ANY KIND, either express or implied. See the License for the specific
|
|
# language governing permissions and limitations under the License.
|
|
"""
|
|
NOTE: All functions in this module are considered private and are
|
|
subject to abrupt breaking changes. Please do not use them directly.
|
|
|
|
"""
|
|
|
|
import io
|
|
import logging
|
|
from gzip import GzipFile
|
|
from gzip import compress as gzip_compress
|
|
|
|
from botocore.compat import urlencode
|
|
from botocore.useragent import register_feature_id
|
|
from botocore.utils import determine_content_length
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
def maybe_compress_request(config, request_dict, operation_model):
    """Attempt to compress the request body using the modeled encodings."""
    if not _should_compress_request(config, request_dict, operation_model):
        return
    for encoding in operation_model.request_compression['encodings']:
        encoder = COMPRESSION_MAPPING.get(encoding)
        if encoder is None:
            # Skip encodings we have no encoder for; a later one may match.
            logger.debug('Unsupported compression encoding: %s', encoding)
            continue
        logger.debug('Compressing request with %s encoding.', encoding)
        # Replace the body in place and record the encoding, then stop:
        # only the first supported encoding is applied.
        request_dict['body'] = encoder(request_dict['body'])
        _set_compression_header(request_dict['headers'], encoding)
        return
|
|
|
|
|
|
def _should_compress_request(config, request_dict, operation_model):
    """Decide whether this request's body is eligible for compression.

    Compression is skipped when disabled by config, when signing with
    SigV2, or when the operation models no compression. Otherwise the
    body must be a compressible type and either be a streaming input
    without a required length, or meet the configured minimum size.
    """
    if (
        config.disable_request_compression is True
        or config.signature_version == 'v2'
        or operation_model.request_compression is None
    ):
        return False

    if not _is_compressible_type(request_dict):
        logger.debug(
            'Body type %s does not support compression.',
            type(request_dict['body']),
        )
        return False

    if operation_model.has_streaming_input:
        # Streaming inputs that require a known length can't be
        # compressed, since compression changes the content length.
        metadata = operation_model.get_streaming_input().metadata
        return 'requiresLength' not in metadata

    return config.request_min_compression_size_bytes <= _get_body_size(
        request_dict['body']
    )
|
|
|
|
|
|
def _is_compressible_type(request_dict):
|
|
body = request_dict['body']
|
|
# Coerce dict to a format compatible with compression.
|
|
if isinstance(body, dict):
|
|
body = urlencode(body, doseq=True, encoding='utf-8').encode('utf-8')
|
|
request_dict['body'] = body
|
|
is_supported_type = isinstance(body, (str, bytes, bytearray))
|
|
return is_supported_type or hasattr(body, 'read')
|
|
|
|
|
|
def _get_body_size(body):
    """Return the body's length in bytes, or 0 when it can't be determined.

    Returning 0 means the body will never meet the minimum compression
    size, so compression is effectively skipped for unmeasurable bodies.
    """
    length = determine_content_length(body)
    if length is not None:
        return length
    logger.debug(
        'Unable to get length of the request body: %s. '
        'Skipping compression.',
        body,
    )
    return 0
|
|
|
|
|
|
def _gzip_compress_body(body):
    """Gzip-compress a request body of type str, bytes, or file-like.

    Seekable file-like objects are restored to their original position
    after being read; non-seekable ones are consumed. Returns compressed
    bytes for str/bytes inputs and a rewound BytesIO for file-like ones.
    """
    register_feature_id('GZIP_REQUEST_COMPRESSION')
    if isinstance(body, str):
        return gzip_compress(body.encode('utf-8'))
    if isinstance(body, (bytes, bytearray)):
        return gzip_compress(body)
    if hasattr(body, 'read'):
        if not (hasattr(body, 'seek') and hasattr(body, 'tell')):
            return _gzip_compress_fileobj(body)
        # Remember where the stream was so callers can keep reading
        # from the same position afterwards.
        position = body.tell()
        compressed = _gzip_compress_fileobj(body)
        body.seek(position)
        return compressed
|
|
|
|
|
|
def _gzip_compress_fileobj(body):
|
|
compressed_obj = io.BytesIO()
|
|
with GzipFile(fileobj=compressed_obj, mode='wb') as gz:
|
|
while True:
|
|
chunk = body.read(8192)
|
|
if not chunk:
|
|
break
|
|
if isinstance(chunk, str):
|
|
chunk = chunk.encode('utf-8')
|
|
gz.write(chunk)
|
|
compressed_obj.seek(0)
|
|
return compressed_obj
|
|
|
|
|
|
def _set_compression_header(headers, encoding):
|
|
ce_header = headers.get('Content-Encoding')
|
|
if ce_header is None:
|
|
headers['Content-Encoding'] = encoding
|
|
else:
|
|
headers['Content-Encoding'] = f'{ce_header},{encoding}'
|
|
|
|
|
|
# Maps modeled compression encoding names to encoder callables;
# gzip is currently the only supported encoding.
COMPRESSION_MAPPING = {'gzip': _gzip_compress_body}
|